#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files, build_package_list
from textutils import fix_maintainer
from lintian import parse_lintian_output, generate_reject_messages
from contents import UnpackedSource

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    elif f['architecture'] == 'source' and f["type"] == 'unreadable':
        utils.warn('unreadable source file (will continue and hope for the best)')
        return f["type"]
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
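# A minimal usage sketch (hypothetical file entry; 'session' is assumed to be
# an open SQLAlchemy session obtained from DBConn()):
#
#   entry = {"type": "deb", "architecture": "amd64"}
#   file_type = get_type(entry, session)   # -> "deb"
#
# Entries carrying a "dbtype" key (set by binary_file_checks below) win over
# the raw "type" field.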
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @type dsc: Upload.Pkg.dsc dict
    @param dsc: (optional); Dsc dictionary

    @type new: dict
    @param new: new packages as returned by a previous call to this function, but override information may have changed
    @rtype: tuple
    @return: (new, byhand) where C{new} is a dictionary of NEW components and
             C{byhand} is a dictionary of byhand files.

    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    byhand = {}
    if new is None:
        new = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Try to get the Package-Set field from an included .dsc file (if possible).
    if dsc:
        for package, entry in build_package_list(dsc, session).items():
            if package not in new:
                new[package] = entry

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new, byhand

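# Sketch of the resulting structure (hypothetical values): for an upload
# introducing a package "foo", the returned C{new} dict might look like
#
#   {"foo": {"priority": "optional", "section": "utils", "type": "deb",
#            "component": "main", "files": ["foo_1.0-1_amd64.deb"]}}
#
# while C{byhand} maps byhand filenames to 1.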
################################################################################

def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

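# Note: a "section id"/"priority id" of -1 is the sentinel check_valid() leaves
# behind when the section or priority is unknown to the database or fails the
# sanity checks above; callers inspecting C{new} can key off that value.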
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, member, data):
        if member.mtime > self.future_cutoff:
            self.future_files[member.name] = member.mtime
        if member.mtime < self.past_cutoff:
            self.ancient_files[member.name] = member.mtime

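# Usage sketch (assumed wiring, matching the python-apt TarFile.go() callback
# API, where go() invokes callback(member, data) once per archive member):
#
#   tt = TarTime(future_cutoff, past_cutoff)
#   apt_inst.DebFile("foo_1.0-1_amd64.deb").data.go(tt.callback)
#   if tt.future_files or tt.ancient_files:
#       pass  # reject with the offending member names and mtimes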
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst, cnf["Dir::Templates"] + "/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"

################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()

###############################################################################

# FIXME: Should move into the database
# suite names DMs can upload to
dm_suites = ['unstable', 'experimental', 'squeeze-backports']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()

def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source = source)
    return q.all()

def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package = package). \
        join(DBBinary.suites).filter_by(suite_name = suite_name)

def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package = package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()

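# Illustrative calls (hypothetical package names; 'session' from DBConn()):
#
#   get_newest_source("hello", session)            # -> DBSource or None
#   get_suite_version_by_source("hello", session)  # -> [("unstable", "2.8-1"), ...]
#   get_suite_version_by_package("hello", "amd64", session)
#       # matches binaries built for "amd64" or "all"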
class Upload(object):
    """
    Everything that has to do with an upload being processed.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        if cnf.has_key("Dinstall::BugServer"):
            self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

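    # The mapping built above is consumed by utils.TemplateSubst(); a
    # hypothetical template line "To: __MAINTAINER_TO__" would be expanded
    # with the RFC2047-encoded address computed here, e.g.:
    #
    #   msg = utils.TemplateSubst(self.Subst, cnf["Dir::Templates"] + "/some.template")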
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError as line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError as line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError as format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

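        # For example, a hypothetical "Architecture: source amd64" line ends up
        # as self.pkg.changes["architecture"] == {"source": 1, "amd64": 1},
        # which is why later checks use .has_key("source") on these fields.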
        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError as msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError as msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.value_list("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not get_suite(suite.lower()):
                self.rejects.append("Unknown distribution `%s'." % (suite))

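    # Hypothetical SuiteMappings entries and their effect on the parsed
    # Distribution field:
    #
    #   "map stable proposed-updates"    -> retarget stable uploads (with a note)
    #   "silent-map oldstable stable"    -> same, without a note
    #   "ignore testing"                 -> drop the suite with a warning
    #   "reject experimental"            -> hard-reject uploads to it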
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.TagSection(utils.deb_extract_control(deb_file))
        except:
            self.rejects.append("%s: deb_extract_control() raised %s." % (f, sys.exc_info()[0]))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version", "Description" ]:
            if field not in control:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.find("Package", "")))

        # Validate the package field
        package = control["Package"]
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control["Version"]
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
        architecture = control["Architecture"]
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.find("Provides")
        if provides is not None:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # If there is a Built-Using field, we need to check we can find the
        # exact source version
        built_using = control.find("Built-Using")
        if built_using is not None:
            try:
                entry["built-using"] = []
                for dep in apt_pkg.parse_depends(built_using):
                    bu_s, bu_v, bu_e = dep[0]
                    # Check that it's an exact match dependency and we have
                    # some form of version
                    if bu_e != "=" or len(bu_v) < 1:
                        self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
                    else:
                        # Find the source id for this version
                        bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
                        if len(bu_so) != 1:
                            self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
                        else:
                            entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )

            except ValueError as e:
                self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))

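        # An acceptable field uses only exact relations, e.g. (hypothetical):
        #
        #   Built-Using: gcc-4.6 (= 4.6.0-11), binutils (= 2.21.90.20111004-2)
        #
        # apt_pkg.parse_depends() yields [[("gcc-4.6", "4.6.0-11", "=")], ...],
        # so each dep[0] above unpacks to (name, version, relation).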

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.find("Section") and entry["section"] != "" \
           and entry["section"] != control.find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.find("Section", ""), entry["section"]))
        if control.find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)

                byhand_dir = get_policy_queue('byhand', session).path
                new_dir = get_policy_queue('new', session).path

                if os.path.exists(os.path.join(byhand_dir, dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(new_dir, dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    # TODO: Don't hardcode this list: use all relevant queues
                    #       The question is how to determine what is relevant
                    for queue_name in ["embargoed", "unembargoed", "proposedupdates", "oldproposedupdates"]:
                        queue = get_policy_queue(queue_name, session)
                        if queue:
                            if os.path.exists(os.path.join(queue.path, dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.value_list("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if entry["component"] not in get_component_names(session):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound as e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            # TODO: Dynamically generate this list
            for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
                queue = get_policy_queue(queue_name, session)
                if queue and os.path.exists(os.path.join(queue.path, f)):
                    self.rejects.append("%s file already exists in the %s queue." % (f, queue_name))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if (not has_binaries) and (not cnf.find_b("Dinstall::AllowSourceOnlyUploads")):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################

    def __dsc_filename(self):
        """
        Returns: (Status, Dsc_Filename)
        where
          Status: Boolean; True when there was no error, False otherwise
          Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
        """
        dsc_filename = None

        # find the dsc
        for name, entry in self.pkg.files.items():
            if entry.has_key("type") and entry["type"] == "dsc":
                if dsc_filename:
                    return False, "cannot process a .changes file with multiple .dsc's."
                else:
                    dsc_filename = name

        if not dsc_filename:
            return False, "source uploads must contain a dsc file"

        return True, dsc_filename

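    # Callers unpack the status flag first, e.g. (sketch):
    #
    #   (status, dsc_filename) = self.__dsc_filename()
    #   if not status:
    #       self.rejects.append(dsc_filename)  # second value is the reason
    #
    # load_dsc() below follows the same (Status, Reason) convention.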
    def load_dsc(self, action=True, signing_rules=1):
        """
        Find and load the dsc from self.pkg.files into self.pkg.dsc

        Returns: (Status, Reason)
        where
          Status: Boolean; True when there was no error, False otherwise
          Reason: String; When Status is False this describes the error
        """

        # find the dsc
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            return False, dsc_filename

        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
        except CantOpenError:
            if not action:
                return False, "%s: can't read file." % (dsc_filename)
        except ParseChangesError as line:
            return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
        except InvalidDscError as line:
            return False, "%s: syntax error on line %s." % (dsc_filename, line)
        except ChangesUnicodeError:
            return False, "%s: dsc file not proper utf-8." % (dsc_filename)

        return True, None

    ###########################################################################

    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        if session is None:
            session = DBConn().session()

        (status, reason) = self.load_dsc(action=action)
        if not status:
            self.rejects.append(reason)
            return False
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            self.rejects.append(dsc_filename)
            return False

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError as format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError as line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            suite = get_suite(dist, session=session)
            if not suite:
                self.rejects.append("%s: cannot find suite %s when checking source formats" % (dsc_filename, dist))
                continue
            allowed = [ x.format_name for x in suite.srcformats ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError as msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.parse_src_depends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1248
1249         # Ensure the Files field contain only what's expected
1250         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1251
1252         # Ensure source is newer than existing source in target suites
1253         session = DBConn().session()
1254         self.check_source_against_db(dsc_filename, session)
1255         self.check_dsc_against_db(dsc_filename, session)
1256
1257         dbchg = get_dbchange(self.pkg.changes_file, session)
1258
1259         # Finally, check if we're missing any files
1260         for f in self.later_check_files:
1261             print 'Checking later_check file %s' % f
1262             # Check if we've already processed this file if we have a dbchg object
1263             ok = False
1264             if dbchg:
1265                 for pf in dbchg.files:
1266                     if pf.filename == f and pf.processed:
1267                         self.notes.append('%s was already processed so we can go ahead' % f)
1268                         ok = True
1269                         del self.pkg.files[f]
1270             if not ok:
1271                 self.rejects.append("Could not find file %s referenced in changes" % f)
1272
1273         session.close()
1274
1275         return (len(self.rejects) == 0)
1276
1277     ###########################################################################
1278
1279     def get_changelog_versions(self, source_dir):
1280         """Extracts the source package and (optionally) grabs the
1281         version history out of debian/changelog for the BTS."""
1282
1283         cnf = Config()
1284
1285         # Find the .dsc (again)
1286         dsc_filename = None
1287         for f in self.pkg.files.keys():
1288             if self.pkg.files[f]["type"] == "dsc":
1289                 dsc_filename = f
1290
1291         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1292         if not dsc_filename:
1293             return
1294
1295         # Create a symlink mirror of the source files in our temporary directory
1296         for f in self.pkg.files.keys():
1297             m = re_issource.match(f)
1298             if m:
1299                 src = os.path.join(source_dir, f)
1300                 # If a file is missing for whatever reason, give up.
1301                 if not os.path.exists(src):
1302                     return
1303                 ftype = m.group(3)
1304                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1305                    self.pkg.orig_files[f].has_key("path"):
1306                     continue
1307                 dest = os.path.join(os.getcwd(), f)
1308                 os.symlink(src, dest)
1309
1310         # If the orig files are not a part of the upload, create symlinks to the
1311         # existing copies.
1312         for orig_file in self.pkg.orig_files.keys():
1313             if not self.pkg.orig_files[orig_file].has_key("path"):
1314                 continue
1315             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1316             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1317
1318         # Extract the source
1319         try:
1320             unpacked = UnpackedSource(dsc_filename)
1321         except Exception as e:
1322             self.rejects.append("'dpkg-source -x' failed for %s. (%s)" % (dsc_filename, str(e)))
1323             return
1324
1325         if not cnf.find("Dir::BTSVersionTrack"):
1326             return
1327
1328         # Get the upstream version
1329         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1330         if re_strip_revision.search(upstr_version):
1331             upstr_version = re_strip_revision.sub('', upstr_version)
1332
1333         # Ensure the changelog file exists
1334         changelog_file = unpacked.get_changelog_file()
1335         if changelog_file is None:
1336             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1337             return
1338
1339         # Parse the changelog
1340         self.pkg.dsc["bts changelog"] = ""
1341         for line in changelog_file.readlines():
1342             m = re_changelog_versions.match(line)
1343             if m:
1344                 self.pkg.dsc["bts changelog"] += line
1345         changelog_file.close()
1346         unpacked.cleanup()
1347
1348         # Check we found at least one revision in the changelog
1349         if not self.pkg.dsc["bts changelog"]:
1350             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1351
1352     def check_source(self):
1353         # Bail out if:
1354         #    a) there's no source
1355         if not self.pkg.changes["architecture"].has_key("source"):
1356             return
1357
1358         tmpdir = utils.temp_dirname()
1359
1360         # Move into the temporary directory
1361         cwd = os.getcwd()
1362         os.chdir(tmpdir)
1363
1364         # Get the changelog version history
1365         self.get_changelog_versions(cwd)
1366
1367         # Move back and cleanup the temporary tree
1368         os.chdir(cwd)
1369
1370         try:
1371             shutil.rmtree(tmpdir)
1372         except OSError as e:
1373             if e.errno != errno.EACCES:
1374                 print "E: unexpected error removing %s" % (tmpdir)
1375                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1376
1377             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1378             # We probably have u-r or u-w directories so chmod everything
1379             # and try again.
1380             cmd = "chmod -R u+rwx %s" % (tmpdir)
1381             result = os.system(cmd)
1382             if result != 0:
1383                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1384             shutil.rmtree(tmpdir)
1385         except Exception as e:
1386             print "E: unexpected exception removing %s: %s" % (tmpdir, e)
1387             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1388
1389     ###########################################################################
1390     def ensure_hashes(self):
1391         # Make sure we recognise the Format: version of the .changes
1392         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1393         if len(format) == 2:
1394             format = int(format[0]), int(format[1])
1395         else:
1396             format = int(float(format[0])), 0
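        # Sketch of the parsing above: a Format value of "1.8" yields the tuple
        # (1, 8), while a dotless value such as "2" takes the else branch and
        # yields (2, 0).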
1397
1398         # We need to deal with the original changes blob, as the fields we need
1399         # might not be in the changes dict serialised into the .dak anymore.
1400         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1401
1402         # Copy the checksums over to the current changes dict.  This will keep
1403         # the existing modifications to it intact.
1404         for field in orig_changes:
1405             if field.startswith('checksums-'):
1406                 self.pkg.changes[field] = orig_changes[field]
1407
1408         # Check for unsupported hashes
1409         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1410             self.rejects.append(j)
1411
1412         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1413             self.rejects.append(j)
1414
1415         # If the changes format predates the version in which a given hash was
1416         # introduced, we calculate it ourselves rather than require it in the file
1417         for hashname, hashfunc, version in utils.known_hashes:
1418             # TODO: Move _ensure_changes_hash into this class
1419             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1420                 self.rejects.append(j)
1421             if "source" in self.pkg.changes["architecture"]:
1422                 # TODO: Move _ensure_dsc_hash into this class
1423                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1424                     self.rejects.append(j)
1425
1426     def check_hashes(self):
1427         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1428             self.rejects.append(m)
1429
1430         for m in utils.check_size(".changes", self.pkg.files):
1431             self.rejects.append(m)
1432
1433         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1434             self.rejects.append(m)
1435
1436         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1437             self.rejects.append(m)
1438
1439         self.ensure_hashes()
1440
1441     ###########################################################################
1442
1443     def ensure_orig(self, target_dir='.', session=None):
1444         """
1445         Ensures that all orig files mentioned in the changes file are present
1446         in target_dir. If they do not exist, they are symlinked into place.
1447
1448         A list containing the symlinks that were created is returned (so they
1449         can be removed later).
1450         """
1451
1452         symlinked = []
1453         cnf = Config()
1454
1455         for filename, entry in self.pkg.dsc_files.iteritems():
1456             if not re_is_orig_source.match(filename):
1457                 # File is not an orig; ignore
1458                 continue
1459
1460             if os.path.exists(filename):
1461                 # File exists, no need to continue
1462                 continue
1463
1464             def symlink_if_valid(path):
1465                 f = utils.open_file(path)
1466                 md5sum = apt_pkg.md5sum(f)
1467                 f.close()
1468
1469                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1470                 expected = (int(entry['size']), entry['md5sum'])
1471
1472                 if fingerprint != expected:
1473                     return False
1474
1475                 dest = os.path.join(target_dir, filename)
1476
1477                 os.symlink(path, dest)
1478                 symlinked.append(dest)
1479
1480                 return True
1481
1482             session_ = session
1483             if session is None:
1484                 session_ = DBConn().session()
1485
1486             found = False
1487
1488             # Look in the pool
1489             for poolfile in get_poolfile_like_name(filename, session_):
1490                 poolfile_path = os.path.join(
1491                     poolfile.location.path, poolfile.filename
1492                 )
1493
1494                 if symlink_if_valid(poolfile_path):
1495                     found = True
1496                     break
1497
1498             if session is None:
1499                 session_.close()
1500
1501             if found:
1502                 continue
1503
1504             # Look in some other queues for the file
1505             queue_names = ['new', 'byhand',
1506                            'proposedupdates', 'oldproposedupdates',
1507                            'embargoed', 'unembargoed']
1508
1509             for queue_name in queue_names:
1510                 queue = get_policy_queue(queue_name, session)
1511                 if not queue:
1512                     continue
1513
1514                 queuefile_path = os.path.join(queue.path, filename)
1515
1516                 if not os.path.exists(queuefile_path):
1517                     # Does not exist in this queue
1518                     continue
1519
1520                 if symlink_if_valid(queuefile_path):
1521                     break
1522
1523         return symlinked
1524
1525     ###########################################################################
1526
1527     def check_lintian(self):
1528         """
1529         Extends self.rejects by checking the output of lintian against tags
1530         specified in Dinstall::LintianTags.
1531         """
1532
1533         cnf = Config()
1534
1535         # Don't reject binary uploads
1536         if not self.pkg.changes['architecture'].has_key('source'):
1537             return
1538
1539         # Only check some distributions
1540         for dist in ('unstable', 'experimental'):
1541             if dist in self.pkg.changes['distribution']:
1542                 break
1543         else:
1544             return
1545
1546         # If we do not have a tagfile, don't do anything
1547         tagfile = cnf.get("Dinstall::LintianTags")
1548         if not tagfile:
1549             return
1550
1551         # Parse the yaml file
1552         sourcefile = open(tagfile, 'r')
1553         sourcecontent = sourcefile.read()
1554         sourcefile.close()
1555
1556         try:
1557             lintiantags = yaml.safe_load(sourcecontent)['lintian']
1558         except yaml.YAMLError as msg:
1559             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1560             return
1561
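        # Judging from how lintiantags is used below, the tag file is expected
        # to map categories to lists of tag names under a top-level 'lintian'
        # key; a rough sketch (category and tag names are illustrative only):
        #
        #   lintian:
        #     fatal:
        #       - some-serious-tag
        #     nonfatal:
        #       - some-minor-tag
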
1562         # Try and find all orig mentioned in the .dsc
1563         symlinked = self.ensure_orig()
1564
1565         # Setup the input file for lintian
1566         fd, temp_filename = utils.temp_filename()
1567         temptagfile = os.fdopen(fd, 'w')
1568         for tags in lintiantags.values():
1569             temptagfile.writelines(['%s\n' % x for x in tags])
1570         temptagfile.close()
1571
1572         try:
1573             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1574                 (temp_filename, self.pkg.changes_file)
1575
1576             result, output = commands.getstatusoutput(cmd)
1577         finally:
1578             # Remove our tempfile and any symlinks we created
1579             os.unlink(temp_filename)
1580
1581             for symlink in symlinked:
1582                 os.unlink(symlink)
1583
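        # Per the lintian manpage, exit status 0 means no policy violations,
        # 1 means tags were emitted and 2 means lintian itself failed at
        # runtime, hence the special-casing of 2 here.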
1584         if result == 2:
1585             utils.warn("lintian failed for %s [return code: %s]." % \
1586                 (self.pkg.changes_file, result))
1587             utils.warn(utils.prefix_multi_line_string(output, \
1588                 " [possible output:] "))
1589
1590         def log(*txt):
1591             if self.logger:
1592                 self.logger.log(
1593                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1594                 )
1595
1596         # Generate messages
1597         parsed_tags = parse_lintian_output(output)
1598         self.rejects.extend(
1599             generate_reject_messages(parsed_tags, lintiantags, log=log)
1600         )
1601
1602     ###########################################################################
1603     def check_urgency(self):
1604         cnf = Config()
1605         if self.pkg.changes["architecture"].has_key("source"):
1606             if not self.pkg.changes.has_key("urgency"):
1607                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1608             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1609             if self.pkg.changes["urgency"] not in cnf.value_list("Urgency::Valid"):
1610                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1611                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1612                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1613
1614     ###########################################################################
1615
1616     # Sanity check the time stamps of files inside debs.
1617     # [Files in the near future cause ugly warnings and extreme time
1618     #  travel can cause errors on extraction]
1619
1620     def check_timestamps(self):
1621         Cnf = Config()
1622
1623         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1624         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1625         tar = TarTime(future_cutoff, past_cutoff)
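        # For example, with Dinstall::PastCutoffYear set to "1975" the past
        # cutoff is midnight on 1975-01-01 (local time, via time.mktime), and
        # with a grace of 86400 the future cutoff is 24 hours from now; the
        # TarTime callback collects members outside that window. (Both config
        # values here are illustrative, not necessarily dak's defaults.)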
1626
1627         for filename, entry in self.pkg.files.items():
1628             if entry["type"] == "deb":
1629                 tar.reset()
1630                 try:
1631                     deb = apt_inst.DebFile(filename)
1632                     deb.control.go(tar.callback)
1633
1634                     future_files = tar.future_files.keys()
1635                     if future_files:
1636                         num_future_files = len(future_files)
1637                         future_file = future_files[0]
1638                         future_date = tar.future_files[future_file]
1639                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1640                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1641
1642                     ancient_files = tar.ancient_files.keys()
1643                     if ancient_files:
1644                         num_ancient_files = len(ancient_files)
1645                         ancient_file = ancient_files[0]
1646                         ancient_date = tar.ancient_files[ancient_file]
1647                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1648                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1649                 except Exception:
1650                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_info()[0], sys.exc_info()[1]))
1651
1652     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1653         for key in "maintaineremail", "changedbyemail", "maintainername", "changedbyname":
1654             if not self.pkg.changes.has_key(key):
1655                 return False
1656         uid_email = '@'.join(uid_email.split('@')[:2])
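        # The split/join above keeps at most the first two '@'-separated parts,
        # so a mangled value like "user@debian.org@junk" collapses to
        # "user@debian.org" while a normal address passes through unchanged
        # (example address illustrative).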
1657         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1658             sponsored = False
1659         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1660             sponsored = False
1661             if uid_name == "":
1662                 sponsored = True
1663         else:
1664             sponsored = True
1665             sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1666             debian_emails = filter(lambda addr: addr.endswith('@debian.org'), sponsor_addresses)
1667             if uid_email not in debian_emails:
1668                 if debian_emails:
1669                     uid_email = debian_emails[0]
1670             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1671                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1672                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1673                         self.pkg.changes["sponsoremail"] = uid_email
1674
1675         return sponsored
1676
1677
1678     ###########################################################################
1679     # check_signed_by_key checks
1680     ###########################################################################
1681
1682     def check_signed_by_key(self):
1683         """Ensure the .changes is signed by an authorized uploader."""
1684         session = DBConn().session()
1685
1686         # First of all we check that the person has proper upload permissions
1687         # and that this upload isn't blocked
1688         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1689
1690         if fpr is None:
1691             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1692             return
1693
1694         # TODO: Check that import-keyring adds UIDs properly
1695         if not fpr.uid:
1696             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1697             return
1698
1699         # Check that the fingerprint which uploaded has permission to do so
1700         self.check_upload_permissions(fpr, session)
1701
1702         # Check that this package is not in a transition
1703         self.check_transition(session)
1704
1705         session.close()
1706
1707
1708     def check_upload_permissions(self, fpr, session):
1709         # Check any one-off upload blocks
1710         self.check_upload_blocks(fpr, session)
1711
1712         # If the source_acl is None, source is never allowed
1713         if fpr.source_acl is None:
1714             if self.pkg.changes["architecture"].has_key("source"):
1715                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1716                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1717                 self.rejects.append(rej)
1718                 return
1719         # DM is a special case unfortunately, so we check it first
1721         # (keys with no source access get more access than DMs in one
1722         #  way; DMs can only upload for their packages whether source
1723         #  or binary, whereas keys with no access might be able to
1724         #  upload some binaries)
1725         elif fpr.source_acl.access_level == 'dm':
1726             self.check_dm_upload(fpr, session)
1727         else:
1728             # If not a DM, we allow full upload rights
1729             uid_email = "%s@debian.org" % (fpr.uid.uid)
1730             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1731
1732
1733         # Check binary upload permissions
1734         # By this point we know that DMs can't have got here unless they
1735         # are allowed to deal with the package concerned so just apply
1736         # normal checks
1737         if fpr.binary_acl.access_level == 'full':
1738             return
1739
1740         # Otherwise we're in the map case
1741         tmparches = self.pkg.changes["architecture"].copy()
1742         tmparches.pop('source', None)
1743
1744         for bam in fpr.binary_acl_map:
1745             tmparches.pop(bam.architecture.arch_string, None)
1746
1747         if len(tmparches.keys()) > 0:
1748             if fpr.binary_reject:
1749                 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1750                 if len(tmparches.keys()) == 1:
1751                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1752                 else:
1753                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1754                 self.rejects.append(rej)
1755             else:
1756                 # TODO: This is where we'll implement reject vs throw away binaries later
1757                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1758                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1759                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1760                 self.rejects.append(rej)
1761
1762
1763     def check_upload_blocks(self, fpr, session):
1764         """Check whether any upload blocks apply to this source, source
1765            version, uid / fpr combination"""
1766
1767         def block_rej_template(fb):
1768             rej = 'Manual upload block in place for package %s' % fb.source
1769             if fb.version is not None:
1770                 rej += ', version %s' % fb.version
1771             return rej
1772
1773         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1774             # version is None if the block applies to all versions
1775             if fb.version is None or fb.version == self.pkg.changes['version']:
1776                 # Check both fpr and uid - either is enough to cause a reject
1777                 if fb.fpr is not None:
1778                     if fb.fpr.fingerprint == fpr.fingerprint:
1779                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1780                 if fb.uid is not None:
1781                     if fb.uid == fpr.uid:
1782                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1783
1784
1785     def check_dm_upload(self, fpr, session):
1786         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1787         ## none of the uploaded packages are NEW
1788         rej = False
1789         for f in self.pkg.files.keys():
1790             if self.pkg.files[f].has_key("byhand"):
1791                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1792                 rej = True
1793             if self.pkg.files[f].has_key("new"):
1794                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1795                 rej = True
1796
1797         if rej:
1798             return
1799
1800         r = get_newest_source(self.pkg.changes["source"], session)
1801
1802         if r is None:
1803             rej = "Could not find existing source package %s in the DM allowed suites and this is a DM upload" % self.pkg.changes["source"]
1804             self.rejects.append(rej)
1805             return
1806
1807         if not r.dm_upload_allowed:
1808             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1809             self.rejects.append(rej)
1810             return
1811
1812         ## the Maintainer: field of the uploaded .changes file corresponds with
1813         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1814         ## uploads)
1815         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1816             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1817
1818         ## the most recent version of the package uploaded to unstable or
1819         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1820         ## non-developer maintainers cannot NMU or hijack packages)
1821
1822         # uploader includes the maintainer
1823         accept = False
1824         for uploader in r.uploaders:
1825             (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1826             # Eww - I hope we never have two people with the same name in Debian
1827             if email == fpr.uid.uid or name == fpr.uid.name:
1828                 accept = True
1829                 break
1830
1831         if not accept:
1832             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1833             return
1834
1835         ## none of the packages are being taken over from other source packages
1836         for b in self.pkg.changes["binary"].keys():
1837             for suite in self.pkg.changes["distribution"].keys():
1838                 for s in get_source_by_package_and_suite(b, suite, session):
1839                     if s.source != self.pkg.changes["source"]:
1840                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1841
1842
1843
1844     def check_transition(self, session):
1845         cnf = Config()
1846
1847         sourcepkg = self.pkg.changes["source"]
1848
1849         # No sourceful upload -> no need to do anything else, direct return
1850         # We also work with unstable uploads, not experimental or those going to some
1851         # proposed-updates queue
1852         if "source" not in self.pkg.changes["architecture"] or \
1853            "unstable" not in self.pkg.changes["distribution"]:
1854             return
1855
1856         # Also, only check if a transitions file is defined (and actually
1857         # exists).
1858         transpath = cnf.get("Dinstall::ReleaseTransitions", "")
1859         if transpath == "" or not os.path.exists(transpath):
1860             return
1861
1862         # Parse the yaml file
1863         sourcefile = open(transpath, 'r')
1864         sourcecontent = sourcefile.read()
1865         try:
1866             transitions = yaml.safe_load(sourcecontent)
1867         except yaml.YAMLError as msg:
1868             # This shouldn't happen, there is a wrapper to edit the file which
1869             # checks it, but we prefer to be safe rather than end up rejecting
1870             # everything.
1871             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1872             return
1873
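        # Judging from the fields read below, each transition entry looks
        # roughly like this sketch (names and versions purely illustrative):
        #
        #   ocaml_transition:
        #     reason: "rebuild the world against the new ocaml"
        #     source: ocaml
        #     new: "3.12.0-1"
        #     rm: "Some Releaseteam Member"
        #     packages:
        #       - some-affected-package
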
1874         # Now look through all defined transitions
1875         for trans in transitions:
1876             t = transitions[trans]
1877             source = t["source"]
1878             expected = t["new"]
1879
1880             # Will be None if nothing is in testing.
1881             current = get_source_in_suite(source, "testing", session)
1882             if current is None or apt_pkg.version_compare(current.version, expected) < 0:
1886                 # This is still valid, the current version in testing is older than
1887                 # the new version we wait for, or there is none in testing yet
1888
1889                 # Check if the source we look at is affected by this.
1890                 if sourcepkg in t['packages']:
1891                     # The source is affected, lets reject it.
1892
1893                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1894                         sourcepkg, trans)
1895
1896                     if current is not None:
1897                         currentlymsg = "at version %s" % (current.version)
1898                     else:
1899                         currentlymsg = "not present in testing"
1900
1901                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1902
1903                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1904 is part of a testing transition designed to get %s migrated (it is
1905 currently %s, we need version %s).  This transition is managed by the
1906 Release Team, and %s is the Release-Team member responsible for it.
1907 Please mail debian-release@lists.debian.org or contact %s directly if you
1908 need further assistance.  You might want to upload to experimental until this
1909 transition is done."""
1910                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1911
1912                     self.rejects.append(rejectmsg)
1913                     return
1914
1915     ###########################################################################
1916     # End check_signed_by_key checks
1917     ###########################################################################
1918
1919     def build_summaries(self):
1920         """ Build a summary of changes the upload introduces. """
1921
1922         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1923
1924         short_summary = summary
1925
1926         # This is for direport's benefit...
1927         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1928
1929         summary += "\n\nChanges:\n" + f
1930
1931         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1932
1933         summary += self.announce(short_summary, 0)
1934
1935         return (summary, short_summary)
1936
1937     ###########################################################################
1938
1939     def close_bugs(self, summary, action):
1940         """
1941         Send mail to close bugs as instructed by the closes field in the changes file.
1942         Also add a line to summary if any work was done.
1943
1944         @type summary: string
1945         @param summary: summary text, as given by L{build_summaries}
1946
1947         @type action: bool
1948         @param action: If set to false, no real action will be taken.
1949
1950         @rtype: string
1951         @return: summary. If action was taken, extended by the list of closed bugs.
1952
1953         """
1954
1955         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1956
1957         bugs = self.pkg.changes["closes"].keys()
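        # changes["closes"] is keyed by bug number, so for a changelog entry
        # of "Closes: #123456, #654321" this yields ["123456", "654321"]
        # (bug numbers illustrative).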
1958
1959         if not bugs:
1960             return summary
1961
1962         bugs.sort()
1963         summary += "Closing bugs: "
1964         for bug in bugs:
1965             summary += "%s " % (bug)
1966             if action:
1967                 self.update_subst()
1968                 self.Subst["__BUG_NUMBER__"] = bug
1969                 if self.pkg.changes["distribution"].has_key("stable"):
1970                     self.Subst["__STABLE_WARNING__"] = """
1971 Note that this package is not part of the released stable Debian
1972 distribution.  It may have dependencies on other unreleased software,
1973 or other instabilities.  Please take care if you wish to install it.
1974 The update will eventually make its way into the next released Debian
1975 distribution."""
1976                 else:
1977                     self.Subst["__STABLE_WARNING__"] = ""
1978                 mail_message = utils.TemplateSubst(self.Subst, template)
1979                 utils.send_mail(mail_message)
1980
1981                 # Clear up after ourselves
1982                 del self.Subst["__BUG_NUMBER__"]
1983                 del self.Subst["__STABLE_WARNING__"]
1984
1985         if action and self.logger:
1986             self.logger.log(["closing bugs"] + bugs)
1987
1988         summary += "\n"
1989
1990         return summary
1991
1992     ###########################################################################
1993
1994     def announce(self, short_summary, action):
1995         """
1996         Send an announce mail about a new upload.
1997
1998         @type short_summary: string
1999         @param short_summary: Short summary text to include in the mail
2000
2001         @type action: bool
2002         @param action: If set to false, no real action will be taken.
2003
2004         @rtype: string
2005         @return: Text string describing the action taken.
2006
2007         """
2008
2009         cnf = Config()
2010
2011         # Skip all of this if not sending mail to avoid confusing people
2012         if cnf.has_key("Dinstall::Options::No-Mail") and cnf["Dinstall::Options::No-Mail"]:
2013             return ""
2014
2015         # Only do announcements for source uploads with a recent dpkg-dev installed
2016         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2017            self.pkg.changes["architecture"].has_key("source"):
2018             return ""
2019
2020         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2021
2022         lists_todo = {}
2023         summary = ""
2024
2025         # Get a unique list of target lists
2026         for dist in self.pkg.changes["distribution"].keys():
2027             suite = get_suite(dist)
2028             if suite is None: continue
2029             for tgt in suite.announce:
2030                 lists_todo[tgt] = 1
2031
2032         self.Subst["__SHORT_SUMMARY__"] = short_summary
2033
2034         for announce_list in lists_todo.keys():
2035             summary += "Announcing to %s\n" % (announce_list)
2036
2037             if action:
2038                 self.update_subst()
2039                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2040                 if cnf.get("Dinstall::TrackingServer") and \
2041                    self.pkg.changes["architecture"].has_key("source"):
2042                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2043                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2044
2045                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2046                 utils.send_mail(mail_message)
2047
2048                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2049
2050         if cnf.find_b("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
2051             summary = self.close_bugs(summary, action)
2052
2053         del self.Subst["__SHORT_SUMMARY__"]
2054
2055         return summary
2056
2057     ###########################################################################
2058     @session_wrapper
2059     def accept (self, summary, short_summary, session=None):
2060         """
2061         Accept an upload.
2062
2063         This moves all files referenced from the .changes into the pool,
2064         sends the accepted mail, announces to lists, closes bugs and
2065         also checks for override disparities. If enabled it will write out
2066         the version history for the BTS Version Tracking and will finally call
2067         L{queue_build}.
2068
2069         @type summary: string
2070         @param summary: Summary text
2071
2072         @type short_summary: string
2073         @param short_summary: Short summary
2074         """
2075
2076         cnf = Config()
2077         stats = SummaryStats()
2078
2079         print "Installing."
2080         self.logger.log(["installing changes", self.pkg.changes_file])
2081
2082         binaries = []
2083         poolfiles = []
2084
2085         # Add the .dsc file to the DB first
2086         for newfile, entry in self.pkg.files.items():
2087             if entry["type"] == "dsc":
2088                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2089                 for j in pfs:
2090                     poolfiles.append(j)
2091
2092         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2093         for newfile, entry in self.pkg.files.items():
2094             if entry["type"] == "deb":
2095                 b, pf = add_deb_to_db(self, newfile, session)
2096                 binaries.append(b)
2097                 poolfiles.append(pf)
2098
2099         # If this is a sourceful diff-only upload that is moving
2100         # cross-component we need to copy the .orig files into the new
2101         # component too for the same reasons as above.
2102         # XXX: mhy: I think this should be in add_dsc_to_db
2103         if self.pkg.changes["architecture"].has_key("source"):
2104             for orig_file in self.pkg.orig_files.keys():
2105                 if not self.pkg.orig_files[orig_file].has_key("id"):
2106                     continue # Skip if it's not in the pool
2107                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2108                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2109                     continue # Skip if the location didn't change
2110
2111                 # Do the move
2112                 oldf = get_poolfile_by_id(orig_file_id, session)
2113                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2114                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2115                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2116
2117                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
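                # utils.poolify() maps (source, component) to the pool
                # subdirectory, so for source "hello" in main new_filename would
                # look like "main/h/hello/hello_2.10.orig.tar.gz" relative to
                # Dir::Pool (path illustrative; "lib*" sources use a four-letter
                # prefix directory such as "libh").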
2118
2119                 # TODO: Care about size/md5sum collisions etc
2120                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2121
2122                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2123                 if newf is None:
2124                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2125                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2126
2127                     session.flush()
2128
2129                     # Don't reference the old file from this changes
2130                     for p in poolfiles:
2131                         if p.file_id == oldf.file_id:
2132                             poolfiles.remove(p)
2133
2134                     poolfiles.append(newf)
2135
2136                     # Fix up the DSC references
2137                     toremove = []
2138
2139                     for df in source.srcfiles:
2140                         if df.poolfile.file_id == oldf.file_id:
2141                             # Add a new DSC entry and mark the old one for deletion
2142                             # Don't do it in the loop so we don't change the thing we're iterating over
2143                             newdscf = DSCFile()
2144                             newdscf.source_id = source.source_id
2145                             newdscf.poolfile_id = newf.file_id
2146                             session.add(newdscf)
2147
2148                             toremove.append(df)
2149
2150                     for df in toremove:
2151                         session.delete(df)
2152
2153                     # Flush our changes
2154                     session.flush()
2155
2156                     # Make sure that our source object is up-to-date
2157                     session.expire(source)
2158
2159         # Add changelog information to the database
2160         self.store_changelog()
2161
2162         # Install the files into the pool
2163         for newfile, entry in self.pkg.files.items():
2164             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2165             utils.move(newfile, destination)
2166             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2167             stats.accept_bytes += float(entry["size"])
2168
2169         # Copy the .changes file across for suites which need it.
2170         copy_changes = dict([(x.copychanges, '')
2171                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2172                              if x.copychanges is not None])
2173
2174         for dest in copy_changes.keys():
2175             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2176
2177         # We're done - commit the database changes
2178         session.commit()
2179         # Our SQL session will automatically start a new transaction after
2180         # the last commit
2181
2182         # Now ensure that the metadata has been added
2183         # This has to be done after we copy the files into the pool
2184         # For source if we have it:
2185         if self.pkg.changes["architecture"].has_key("source"):
2186             import_metadata_into_db(source, session)
2187
2188         # Now for any of our binaries
2189         for b in binaries:
2190             import_metadata_into_db(b, session)
2191
2192         session.commit()
2193
2194         # Move the .changes into the 'done' directory
2195         ye, mo, da = time.gmtime()[0:3]
2196         donedir = os.path.join(cnf["Dir::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2197         if not os.path.isdir(donedir):
2198             os.makedirs(donedir)
2199
2200         utils.move(self.pkg.changes_file,
2201                    os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2202
2203         if self.pkg.changes["architecture"].has_key("source"):
2204             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2205
2206         self.update_subst()
2207         self.Subst["__SUMMARY__"] = summary
2208         mail_message = utils.TemplateSubst(self.Subst,
2209                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2210         utils.send_mail(mail_message)
2211         self.announce(short_summary, 1)
2212
2213         ## Helper stuff for DebBugs Version Tracking
2214         if cnf.find("Dir::BTSVersionTrack"):
2215             if self.pkg.changes["architecture"].has_key("source"):
2216                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2217                 version_history = os.fdopen(fd, 'w')
2218                 version_history.write(self.pkg.dsc["bts changelog"])
2219                 version_history.close()
2220                 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2221                                       self.pkg.changes_file[:-8]+".versions")
2222                 os.rename(temp_filename, filename)
2223                 os.chmod(filename, 0o644)
2224
2225             # Write out the binary -> source mapping.
2226             (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2227             debinfo = os.fdopen(fd, 'w')
2228             for name, entry in sorted(self.pkg.files.items()):
2229                 if entry["type"] == "deb":
2230                     line = " ".join([entry["package"], entry["version"],
2231                                      entry["architecture"], entry["source package"],
2232                                      entry["source version"]])
2233                     debinfo.write(line+"\n")
2234             debinfo.close()
2235             filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2236                                   self.pkg.changes_file[:-8]+".debinfo")
2237             os.rename(temp_filename, filename)
2238             os.chmod(filename, 0o644)
2239
2240         session.commit()
2241
2242         # Set up our copy queues (e.g. buildd queues)
2243         for suite_name in self.pkg.changes["distribution"].keys():
2244             suite = get_suite(suite_name, session)
2245             for q in suite.copy_queues:
2246                 for f in poolfiles:
2247                     q.add_file_from_pool(f)
2248
2249         session.commit()
2250
2251         # Finally...
2252         stats.accept_count += 1
2253
2254     def check_override(self):
2255         """
2256         Checks override entries for validity. Mails "Override disparity" warnings,
2257         if that feature is enabled.
2258
2259         Abandons the check if
2260           - override disparity checks are disabled
2261           - mail sending is disabled
2262         """
2263
2264         cnf = Config()
2265
2266         # Abandon the check if override disparity checks have been disabled
2267         if not cnf.find_b("Dinstall::OverrideDisparityCheck"):
2268             return
2269
2270         summary = self.pkg.check_override()
2271
2272         if summary == "":
2273             return
2274
2275         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2276
2277         self.update_subst()
2278         self.Subst["__SUMMARY__"] = summary
2279         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2280         utils.send_mail(mail_message)
2281         del self.Subst["__SUMMARY__"]
2282
2283     ###########################################################################
2284
2285     def remove(self, from_dir=None):
2286         """
2287         Used (for instance) in p-u to remove the package from unchecked
2288
2289         Also removes the package from holding area.
2290         """
2291         if from_dir is None:
2292             from_dir = self.pkg.directory
2293         h = Holding()
2294
2295         for f in self.pkg.files.keys():
2296             os.unlink(os.path.join(from_dir, f))
2297             if os.path.exists(os.path.join(h.holding_dir, f)):
2298                 os.unlink(os.path.join(h.holding_dir, f))
2299
2300         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2301         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2302             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2303
2304     ###########################################################################
2305
2306     def move_to_queue (self, queue):
2307         """
2308         Move files to a destination queue using the permissions in the table
2309         """
2310         h = Holding()
2311         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2312                    queue.path, perms=int(queue.change_perms, 8))
2313         for f in self.pkg.files.keys():
2314             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2315
2316     ###########################################################################
2317
2318     def force_reject(self, reject_files):
2319         """
2320         Forcefully move files from the current directory to the
2321         reject directory.  If any file already exists in the reject
2322         directory it will be moved to the morgue to make way for
2323         the new file.
2324
2325         @type reject_files: dict
2326         @param reject_files: file dictionary
2327
2328         """
2329
2330         cnf = Config()
2331
2332         for file_entry in reject_files:
2333             # Skip any files which don't exist or which we don't have permission to copy.
2334             if not os.access(file_entry, os.R_OK):
2335                 continue
2336
2337             dest_file = os.path.join(cnf["Dir::Reject"], file_entry)
2338
2339             try:
2340                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o644)
2341             except OSError as e:
2342                 # File exists?  Let's find a new name by adding a number
2343                 if e.errno == errno.EEXIST:
2344                     try:
2345                         dest_file = utils.find_next_free(dest_file, 255)
2346                     except NoFreeFilenameError:
2347                         # Something's either gone badly Pete Tong, or
2348                         # someone is trying to exploit us.
2349                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Reject"]))
2350                         return
2351
2352                     # Make sure we really got it
2353                     try:
2354                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
2355                     except OSError as e:
2356                         # Likewise
2357                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2358                         return
2359                 else:
2360                     raise
2361             # If we got here, we own the destination file, so we can
2362             # safely overwrite it.
2363             utils.move(file_entry, dest_file, 1, perms=0o660)
2364             os.close(dest_fd)
2365
2366     ###########################################################################
2367     def do_reject (self, manual=0, reject_message="", notes=""):
2368         """
2369         Reject an upload. If called without a reject message or C{manual} is
2370         true, spawn an editor so the user can write one.
2371
2372         @type manual: bool
2373         @param manual: manual or automated rejection
2374
2375         @type reject_message: string
2376         @param reject_message: A reject message
2377
2378         @return: 0
2379
2380         """
2381         # If we weren't given a manual rejection message, spawn an
2382         # editor so the user can add one in...
2383         if manual and not reject_message:
2384             (fd, temp_filename) = utils.temp_filename()
2385             temp_file = os.fdopen(fd, 'w')
2386             if len(notes) > 0:
2387                 for note in notes:
2388                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2389                                     % (note.author, note.version, note.notedate, note.comment))
2390             temp_file.close()
2391             editor = os.environ.get("EDITOR","vi")
2392             answer = 'E'
2393             while answer == 'E':
2394                 os.system("%s %s" % (editor, temp_filename))
2395                 temp_fh = utils.open_file(temp_filename)
2396                 reject_message = "".join(temp_fh.readlines())
2397                 temp_fh.close()
2398                 print "Reject message:"
2399                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2400                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2401                 answer = "XXX"
2402                 while prompt.find(answer) == -1:
2403                     answer = utils.our_raw_input(prompt)
2404                     m = re_default_answer.search(prompt)
2405                     if answer == "":
2406                         answer = m.group(1)
2407                     answer = answer[:1].upper()
2408             os.unlink(temp_filename)
2409             if answer == 'A':
2410                 return 1
2411             elif answer == 'Q':
2412                 sys.exit(0)
2413
2414         print "Rejecting.\n"
2415
2416         cnf = Config()
2417
2418         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2419         reason_filename = os.path.join(cnf["Dir::Reject"], reason_filename)
2420         changesfile = os.path.join(cnf["Dir::Reject"], self.pkg.changes_file)
2421
2422         # Move all the files into the reject directory
2423         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2424         self.force_reject(reject_files)
2425
2426         # Change permissions of the .changes file to be world readable
2427         try:
2428             os.chmod(changesfile, os.stat(changesfile).st_mode | stat.S_IROTH)
2429         except OSError as e:
2430             # Ignore 'Operation not permitted' (EPERM).
2431             if e.errno != errno.EPERM:
2432                 raise
2433
2434         # If we fail here someone is probably trying to exploit the race
2435         # so let's just raise an exception ...
2436         if os.path.exists(reason_filename):
2437             os.unlink(reason_filename)
2438         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
2439
2440         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2441
2442         self.update_subst()
2443         if not manual:
2444             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2445             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2446             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2447             os.write(reason_fd, reject_message)
2448             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2449         else:
2450             # Build up the rejection email
2451             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2452             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2453             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2454             self.Subst["__REJECT_MESSAGE__"] = ""
2455             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2456             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2457             # Write the rejection email out as the <foo>.reason file
2458             os.write(reason_fd, reject_mail_message)
2459
2460         del self.Subst["__REJECTOR_ADDRESS__"]
2461         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2462         del self.Subst["__CC__"]
2463
2464         os.close(reason_fd)
2465
2466         # Send the rejection mail
2467         utils.send_mail(reject_mail_message)
2468
2469         if self.logger:
2470             self.logger.log(["rejected", self.pkg.changes_file])
2471
2472         stats = SummaryStats()
2473         stats.reject_count += 1
2474         return 0
2475
2476     ################################################################################
2477     def in_override_p(self, package, component, suite, binary_type, filename, session):
2478         """
2479         Check if a package already has override entries in the DB
2480
2481         @type package: string
2482         @param package: package name
2483
2484         @type component: string
2485         @param component: database id of the component
2486
2487         @type suite: int
2488         @param suite: database id of the suite
2489
2490         @type binary_type: string
2491         @param binary_type: type of the package
2492
2493         @type filename: string
2494         @param filename: filename we check
2495
2496         @return: the database result. But no one cares anyway.
2497
2498         """
2499
2500         cnf = Config()
2501
2502         if binary_type == "": # must be source
2503             file_type = "dsc"
2504         else:
2505             file_type = binary_type
2506
2507         # Override suite name; used for example with proposed-updates
2508         oldsuite = get_suite(suite, session)
        if oldsuite is not None and oldsuite.overridesuite:
2510             suite = oldsuite.overridesuite
2511
2512         result = get_override(package, suite, component, file_type, session)
2513
2514         # If checking for a source package fall back on the binary override type
2515         if file_type == "dsc" and len(result) < 1:
2516             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2517
2518         # Remember the section and priority so we can check them later if appropriate
2519         if len(result) > 0:
2520             result = result[0]
2521             self.pkg.files[filename]["override section"] = result.section.section
2522             self.pkg.files[filename]["override priority"] = result.priority.priority
2523             return result
2524
2525         return None
2526
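    # A minimal usage sketch (package and suite names invented): for a
    # source-only upload with no 'dsc' override row yet, the binary
    # fallback above means
    #
    #   self.in_override_p("foo", "main", "unstable", "", "foo_1.0-1.dsc", session)
    #
    # can still return an existing 'deb'/'udeb' override row, recording its
    # section and priority for the later override checks.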
2527     ################################################################################
2528     def get_anyversion(self, sv_list, suite):
2529         """
2530         @type sv_list: list
2531         @param sv_list: list of (suite, version) tuples to check
2532
2533         @type suite: string
2534         @param suite: suite name
2535
        @return: the highest version from C{sv_list} found in C{suite} or
                 any suite recorded as enhancing it, or None if none match
2537         """
2539         anyversion = None
2540         anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2541         for (s, v) in sv_list:
2542             if s in [ x.lower() for x in anysuite ]:
2543                 if not anyversion or apt_pkg.version_compare(anyversion, v) <= 0:
2544                     anyversion = v
2545
2546         return anyversion
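    # Example (invented data): with a version check saying that
    # "testing-proposed-updates" enhances "testing",
    #
    #   self.get_anyversion([("testing", "1.0-1"),
    #                        ("testing-proposed-updates", "1.0-2")], "testing")
    #
    # returns "1.0-2", the highest version visible from testing.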
2547
2548     ################################################################################
2549
2550     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2551         """
2552         @type sv_list: list
2553         @param sv_list: list of (suite, version) tuples to check
2554
        @type filename: string
        @param filename: filename the version checks are being run for
                         (only used in reject/warning messages)

        @type new_version: string
        @param new_version: version of the uploaded package

        @type sourceful: boolean
        @param sourceful: whether the upload includes source

        Ensure versions are newer than existing packages in target
        suites and that the cross-suite version checking rules as
        configured are satisfied.
2564         """
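        # Semantics, sketched with invented suite names: if "unstable"
        # MustBeNewerThan "stable" and MustBeOlderThan "experimental",
        # a sourceful upload of version V to unstable is rejected unless
        #
        #   V > every version of the package in stable, and
        #   V < every version of the package in experimental
        #
        # (a MustBeOlderThan conflict can sometimes be "saved" by the
        # distribution-version propagation handled below).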
2565
2566         cnf = Config()
2567
2568         # Check versions for each target suite
2569         for target_suite in self.pkg.changes["distribution"].keys():
2570             # Check we can find the target suite
2571             ts = get_suite(target_suite)
2572             if ts is None:
2573                 self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
2574                 continue
2575
2576             must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2577             must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2578
2579             # Enforce "must be newer than target suite" even if conffile omits it
2580             if target_suite not in must_be_newer_than:
2581                 must_be_newer_than.append(target_suite)
2582
2583             for (suite, existent_version) in sv_list:
2584                 vercmp = apt_pkg.version_compare(new_version, existent_version)
2585
2586                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2587                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2588
2589                 if suite in must_be_older_than and vercmp > -1:
2590                     cansave = 0
2591
2592                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2593                         # we really use the other suite, ignoring the conflicting one ...
2594                         addsuite = self.pkg.changes["distribution-version"][suite]
2595
2596                         add_version = self.get_anyversion(sv_list, addsuite)
2597                         target_version = self.get_anyversion(sv_list, target_suite)
2598
2599                         if not add_version:
2600                             # not add_version can only happen if we map to a suite
2601                             # that doesn't enhance the suite we're propup'ing from.
2602                             # so "propup-ver x a b c; map a d" is a problem only if
2603                             # d doesn't enhance a.
2604                             #
2605                             # i think we could always propagate in this case, rather
2606                             # than complaining. either way, this isn't a REJECT issue
2607                             #
2608                             # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2610                             self.pkg.changes.setdefault("propdistribution", {})
2611                             self.pkg.changes["propdistribution"][addsuite] = 1
2612                             cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.version_compare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.version_compare(new_version, add_version) > 0 and \
                             apt_pkg.version_compare(add_version, target_version) >= 0:
                            # propagate!!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
2629
2630                     if not cansave:
2631                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2632
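    # Worked example of the "save" path (suite names invented): an upload
    # to stable-security that is newer than what testing has violates
    # "MustBeOlderThan testing".  If the changes metadata carries
    #
    #   self.pkg.changes["distribution-version"] = \
    #       {"testing": "testing-proposed-updates"}
    #
    # the upload is, version checks permitting, also marked for
    # testing-proposed-updates in self.pkg.changes["propdistribution"],
    # restoring the invariant without a REJECT.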
2633     ################################################################################
2634     def check_binary_against_db(self, filename, session):
2635         # Ensure version is sane
2636         self.cross_suite_version_check( \
2637             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2638                 self.pkg.files[filename]["architecture"], session),
2639             filename, self.pkg.files[filename]["version"], sourceful=False)
2640
2641         # Check for any existing copies of the file
2642         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2643         q = q.filter_by(version=self.pkg.files[filename]["version"])
2644         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2645
2646         if q.count() > 0:
            self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2648
2649     ################################################################################
2650
2651     def check_source_against_db(self, filename, session):
2652         source = self.pkg.dsc.get("source")
2653         version = self.pkg.dsc.get("version")
2654
2655         # Ensure version is sane
2656         self.cross_suite_version_check( \
2657             get_suite_version_by_source(source, session), filename, version,
2658             sourceful=True)
2659
2660     ################################################################################
2661     def check_dsc_against_db(self, filename, session):
        """
        @warning: NB: this function can remove entries from the 'files' index [if
         the orig tarball is a duplicate of the one in the archive]; if
         you're iterating over 'files' and call this function as part of
         the loop, be sure to add a check to the top of the loop to
         ensure you haven't just tried to dereference the deleted entry.
        """
2671
2672         Cnf = Config()
2673         self.pkg.orig_files = {} # XXX: do we need to clear it?
2674         orig_files = self.pkg.orig_files
2675
2676         # Try and find all files mentioned in the .dsc.  This has
2677         # to work harder to cope with the multiple possible
2678         # locations of an .orig.tar.gz.
2679         # The ordering on the select is needed to pick the newest orig
2680         # when it exists in multiple places.
2681         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2682             found = None
2683             if self.pkg.files.has_key(dsc_name):
2684                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2685                 actual_size = int(self.pkg.files[dsc_name]["size"])
2686                 found = "%s in incoming" % (dsc_name)
2687
2688                 # Check the file does not already exist in the archive
2689                 ql = get_poolfile_like_name(dsc_name, session)
2690
                # Only keep results whose filename really ends in dsc_name
                # (filter into a new list: calling ql.remove() while
                # iterating over ql would skip elements)
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2695
2696                 # "[dak] has not broken them.  [dak] has fixed a
2697                 # brokenness.  Your crappy hack exploited a bug in
2698                 # the old dinstall.
2699                 #
2700                 # "(Come on!  I thought it was always obvious that
2701                 # one just doesn't release different files with
2702                 # the same name and version.)"
2703                 #                        -- ajk@ on d-devel@l.d.o
2704
2705                 if len(ql) > 0:
2706                     # Ignore exact matches for .orig.tar.gz
2707                     match = 0
2708                     if re_is_orig_source.match(dsc_name):
2709                         for i in ql:
2710                             if self.pkg.files.has_key(dsc_name) and \
2711                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2712                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2713                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2714                                 # TODO: Don't delete the entry, just mark it as not needed
2715                                 # This would fix the stupidity of changing something we often iterate over
2716                                 # whilst we're doing it
2717                                 del self.pkg.files[dsc_name]
2718                                 dsc_entry["files id"] = i.file_id
2719                                 if not orig_files.has_key(dsc_name):
2720                                     orig_files[dsc_name] = {}
2721                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2722                                 match = 1
2723
2724                                 # Don't bitch that we couldn't find this file later
2725                                 try:
2726                                     self.later_check_files.remove(dsc_name)
2727                                 except ValueError:
2728                                     pass
2729
2730
2731                     if not match:
                        self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2733
2734             elif re_is_orig_source.match(dsc_name):
2735                 # Check in the pool
2736                 ql = get_poolfile_like_name(dsc_name, session)
2737
                # Only keep results whose filename really ends in dsc_name
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                # (filter into a new list: calling ql.remove() while
                # iterating over ql would skip elements)
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2743
2744                 if len(ql) > 0:
2745                     # Unfortunately, we may get more than one match here if,
2746                     # for example, the package was in potato but had an -sa
2747                     # upload in woody.  So we need to choose the right one.
2748
2749                     # default to something sane in case we don't match any or have only one
2750                     x = ql[0]
2751
2752                     if len(ql) > 1:
2753                         for i in ql:
2754                             old_file = os.path.join(i.location.path, i.filename)
2755                             old_file_fh = utils.open_file(old_file)
2756                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2757                             old_file_fh.close()
2758                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2759                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2760                                 x = i
2761
                    # use the chosen newest match (x), not the loop variable
                    old_file = os.path.join(x.location.path, x.filename)
2763                     old_file_fh = utils.open_file(old_file)
2764                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2765                     old_file_fh.close()
2766                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2767                     found = old_file
2768                     suite_type = x.location.archive_type
2769                     # need this for updating dsc_files in install()
2770                     dsc_entry["files id"] = x.file_id
2771                     # See install() in process-accepted...
2772                     if not orig_files.has_key(dsc_name):
2773                         orig_files[dsc_name] = {}
2774                     orig_files[dsc_name]["id"] = x.file_id
2775                     orig_files[dsc_name]["path"] = old_file
2776                     orig_files[dsc_name]["location"] = x.location.location_id
2777                 else:
2778                     # TODO: Determine queue list dynamically
2779                     # Not there? Check the queue directories...
2780                     for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
2781                         queue = get_policy_queue(queue_name, session)
2782                         if not queue:
2783                             continue
2784
2785                         in_otherdir = os.path.join(queue.path, dsc_name)
2786
2787                         if os.path.exists(in_otherdir):
2788                             in_otherdir_fh = utils.open_file(in_otherdir)
2789                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2790                             in_otherdir_fh.close()
2791                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2792                             found = in_otherdir
2793                             if not orig_files.has_key(dsc_name):
2794                                 orig_files[dsc_name] = {}
2795                             orig_files[dsc_name]["path"] = in_otherdir
2796
2797                     if not found:
2798                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2799                         continue
2800             else:
2801                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2802                 continue
2803             if actual_md5 != dsc_entry["md5sum"]:
2804                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2805             if actual_size != int(dsc_entry["size"]):
2806                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2807
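    # After this check self.pkg.orig_files maps each orig tarball to a
    # small dict; sketched with invented values (keys as set above, the
    # "id"/"location" keys only when the file came from the pool):
    #
    #   { "foo_1.0.orig.tar.gz": { "id": 12345,
    #                              "path": "pool/main/f/foo/foo_1.0.orig.tar.gz",
    #                              "location": 1 } }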
2808     ################################################################################
2809     # This is used by process-new and process-holding to recheck a changes file
2810     # at the time we're running.  It mainly wraps various other internal functions
2811     # and is similar to accepted_checks - these should probably be tidied up
2812     # and combined
2813     def recheck(self, session):
2814         cnf = Config()
2815         for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2818             if not self.pkg.files.has_key(f):
2819                 continue
2820
2821             entry = self.pkg.files[f]
2822
2823             # Check that the source still exists
2824             if entry["type"] == "deb":
2825                 source_version = entry["source version"]
2826                 source_package = entry["source package"]
2827                 if not self.pkg.changes["architecture"].has_key("source") \
2828                    and not source_exists(source_package, source_version, \
2829                     suites = self.pkg.changes["distribution"].keys(), session = session):
2830                     source_epochless_version = re_no_epoch.sub('', source_version)
2831                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2832                     found = False
2833                     for queue_name in ["embargoed", "unembargoed", "newstage"]:
2834                         queue = get_policy_queue(queue_name, session)
2835                         if queue and os.path.exists(os.path.join(queue.path, dsc_filename)):
2836                             found = True
2837                     if not found:
2838                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2839
2840             # Version and file overwrite checks
2841             if entry["type"] == "deb":
2842                 self.check_binary_against_db(f, session)
2843             elif entry["type"] == "dsc":
2844                 self.check_source_against_db(f, session)
2845                 self.check_dsc_against_db(f, session)
2846
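    # Typical call site (sketch): process-new re-runs these checks against
    # the current database state before accepting a package, e.g.
    #
    #   upload.recheck(session)
    #   if upload.rejects:
    #       # summarise the problems and prompt the operator
    #       ...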
2847     ################################################################################
2848     def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database, since that's not
        # frozen between accept and our run time when called from p-a.
2851
2852         # overwrite_checks is set to False when installing to stable/oldstable
2853
        propagate={}
        nopropagate={}
2856
2857         # Find the .dsc (again)
2858         dsc_filename = None
2859         for f in self.pkg.files.keys():
2860             if self.pkg.files[f]["type"] == "dsc":
2861                 dsc_filename = f
2862
2863         for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2866             if not self.pkg.files.has_key(checkfile):
2867                 continue
2868
2869             entry = self.pkg.files[checkfile]
2870
2871             # Check that the source still exists
2872             if entry["type"] == "deb":
2873                 source_version = entry["source version"]
2874                 source_package = entry["source package"]
2875                 if not self.pkg.changes["architecture"].has_key("source") \
2876                    and not source_exists(source_package, source_version, \
2877                     suites = self.pkg.changes["distribution"].keys(), \
2878                     session = session):
2879                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2880
2881             # Version and file overwrite checks
2882             if overwrite_checks:
2883                 if entry["type"] == "deb":
2884                     self.check_binary_against_db(checkfile, session)
2885                 elif entry["type"] == "dsc":
2886                     self.check_source_against_db(checkfile, session)
2887                     self.check_dsc_against_db(dsc_filename, session)
2888
            # propagate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1
2895
        for suite in propagate.keys():
            if suite in nopropagate:
2898                 continue
2899             self.pkg.changes["distribution"][suite] = 1
2900
        for checkfile in self.pkg.files.keys():
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2905                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2906
2907     ################################################################################
2908     # If any file of an upload has a recent mtime then chances are good
2909     # the file is still being uploaded.
2910
2911     def upload_too_new(self):
2912         cnf = Config()
2913         too_new = False
2914         # Move back to the original directory to get accurate time stamps
2915         cwd = os.getcwd()
2916         os.chdir(self.pkg.directory)
2917         file_list = self.pkg.files.keys()
2918         file_list.extend(self.pkg.dsc_files.keys())
2919         file_list.append(self.pkg.changes_file)
2920         for f in file_list:
2921             try:
2922                 last_modified = time.time()-os.path.getmtime(f)
2923                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2924                     too_new = True
2925                     break
            except OSError:
                # the file may have vanished between listing and stat()
                pass
2928
2929         os.chdir(cwd)
2930         return too_new
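    # Dinstall::SkipTime is the minimum age, in seconds, before an upload
    # is considered complete.  An illustrative dak.conf stanza:
    #
    #   Dinstall
    #   {
    #     SkipTime 300;   // leave fresh uploads alone for five minutes
    #   };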
2931
2932     def store_changelog(self):
2933
2934         # Skip binary-only upload if it is not a bin-NMU
2935         if not self.pkg.changes['architecture'].has_key('source'):
2936             from daklib.regexes import re_bin_only_nmu
2937             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2938                 return
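        # (bin-NMU versions carry a "+bN" suffix, e.g. "1.2-3+b1"; that is
        # what re_bin_only_nmu is assumed to match here)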
2939
2940         session = DBConn().session()
2941
2942         # Check if upload already has a changelog entry
2943         query = """SELECT changelog_id FROM changes WHERE source = :source
2944                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2945         if session.execute(query, {'source': self.pkg.changes['source'], \
2946                                    'version': self.pkg.changes['version'], \
2947                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2948             session.commit()
2949             return
2950
2951         # Add current changelog text into changelogs_text table, return created ID
2952         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2953         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2954
2955         # Link ID to the upload available in changes table
2956         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2957                    AND version = :version AND architecture = :architecture"""
2958         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2959                                 'version': self.pkg.changes['version'], \
2960                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2961
2962         session.commit()
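    # Net effect, sketched as SQL (table/column names as used above,
    # source and version invented):
    #
    #   SELECT c.source, c.version, t.changelog
    #     FROM changes c
    #     JOIN changelogs_text t ON t.id = c.changelog_id
    #    WHERE c.source = 'foo' AND c.version = '1.0-1';
    #
    # now returns the changelog text stored for this upload.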