#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

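# A minimal usage sketch (hypothetical file entry; 'session' is assumed to be
# an open SQLAlchemy session as used throughout this module).  Note that
# unknown types are fatal: utils.fubar() exits the process.
#
#     f = {"type": "dsc", "package": "hello", "section": "devel"}
#     file_type = get_type(f, session)   # -> "dsc"
#
# Entries carrying a "dbtype" key (set by binary_file_checks below) short-cut
# the detection and return that value directly.
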
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: tuple
    @return: (new, byhand): a dictionary of NEW components and a dictionary
             of byhand files.

    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    new = {}
    byhand = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new, byhand

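# A usage sketch (hypothetical path and structures; in practice these come
# from an Upload object like the one defined later in this module):
#
#     new, byhand = determine_new("/srv/queue/hello_2.10-1_amd64.changes",
#                                 upload.pkg.changes, upload.pkg.files,
#                                 session=session)
#     for pkg in new.keys():
#         print "%s is NEW for %s" % (pkg, new[pkg]["component"])
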
################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # (compare the priority *name*; 'priority' is a database object)
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

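# Typically called right after determine_new(), e.g. (sketch):
#
#     new, byhand = determine_new(filename, changes, files, session=session)
#     check_valid(new, session=session)
#     # entries whose "section id" or "priority id" is -1 failed validation
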
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

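# A sketch of how this class is driven (see Upload.check_timestamps, outside
# this section), assuming the old python-apt extraction API whose per-member
# callback has exactly TarTime.callback's signature; the cutoffs and filename
# here are hypothetical:
#
#     tar = TarTime(future_cutoff=int(time.time()) + 24 * 3600,
#                   past_cutoff=0)
#     deb = utils.open_file("hello_2.10-1_amd64.deb")
#     apt_inst.debExtract(deb, tar.callback, "data.tar.gz")
#     deb.close()
#     if tar.future_files or tar.ancient_files:
#         pass  # reject: timestamps outside the allowed window
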
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst, cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"

################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()

###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()

def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source = source)
    return q.all()

def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package = package). \
        join(DBBinary.suites).filter_by(suite_name = suite_name)

def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    return session.query(Suite.suite_name, DBBinary.version). \
        join(Suite.binaries).filter_by(package = package). \
        join(DBBinary.architecture). \
        filter(Architecture.arch_string.in_([arch_string, 'all'])).all()

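# Usage sketches for the helpers above (hypothetical package and suite names;
# 'session' is an open SQLAlchemy session):
#
#     src = get_newest_source("hello", session)      # DBSource or None
#     if src is not None:
#         print src.version
#
#     for suite_name, version in get_suite_version_by_source("hello", session):
#         print "%s: %s" % (suite_name, version)
#
#     # get_source_by_package_and_suite returns an unevaluated query;
#     # call .all() or .first() to execute it.
#     q = get_source_by_package_and_suite("libhello0", "unstable", session)
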
class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append("%s: `%s' from Closes field isn't a number." % (filename, i))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

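    # A minimal driver sketch (hypothetical path; assumes a configured dak
    # environment, since __init__/reset() read Config()):
    #
    #     u = Upload()
    #     if u.load_changes("/srv/queue/hello_2.10-1_amd64.changes"):
    #         u.update_subst()
    #     print u.package_info()   # accumulated rejects/warnings/notes
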
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

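    # The SuiteMappings entries handled above are whitespace-separated
    # strings; hypothetical examples in the formats the code accepts:
    #
    #     map stable proposed-updates
    #     silent-map stable-security stable
    #     map-unreleased unstable experimental
    #     ignore oldstable
    #     reject frozen
    #     propup-version testing-proposed-updates testing
    #
    # "map" renames a target suite (with a note, unless "silent-map" is
    # used), "ignore"/"reject" drop or refuse a target, and "propup-version"
    # records extra suites in changes["distribution-version"].
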
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            # Indexing raises an exception (caught below) if Description is missing
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

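        # e.g. a file named "hello_2.10-1_amd64.deb" (hypothetical) yields
        # group(1) "hello", group(2) "2.10-1" and group(3) "amd64", which are
        # compared against the control fields below.
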
        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existing source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

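        # e.g. a file named "hello_2.10-1.dsc" (hypothetical) yields package
        # "hello", version "2.10-1" and type "dsc"; an orig tarball such as
        # "hello_2.10.orig.tar.gz" additionally matches re_is_orig_source,
        # used below.
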
        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

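        # ComponentMappings entries are "<source> <dest>" pairs, e.g. the
        # historical mapping "non-US/main main" (hypothetical here), which
        # rewrites uploads still targeting the old component onto the
        # current one.
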
        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if not os.access(f, os.R_OK):
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found although Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source-only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats is allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True

1200     ###########################################################################
1201
1202     def get_changelog_versions(self, source_dir):
1203         """Extracts a the source package and (optionally) grabs the
1204         version history out of debian/changelog for the BTS."""
1205
1206         cnf = Config()
1207
1208         # Find the .dsc (again)
1209         dsc_filename = None
1210         for f in self.pkg.files.keys():
1211             if self.pkg.files[f]["type"] == "dsc":
1212                 dsc_filename = f
1213
1214         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1215         if not dsc_filename:
1216             return
1217
1218         # Create a symlink mirror of the source files in our temporary directory
1219         for f in self.pkg.files.keys():
1220             m = re_issource.match(f)
1221             if m:
1222                 src = os.path.join(source_dir, f)
1223                 # If a file is missing for whatever reason, give up.
1224                 if not os.path.exists(src):
1225                     return
1226                 ftype = m.group(3)
1227                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1228                    self.pkg.orig_files[f].has_key("path"):
1229                     continue
1230                 dest = os.path.join(os.getcwd(), f)
1231                 os.symlink(src, dest)
1232
1233         # If the orig files are not a part of the upload, create symlinks to the
1234         # existing copies.
1235         for orig_file in self.pkg.orig_files.keys():
1236             if not self.pkg.orig_files[orig_file].has_key("path"):
1237                 continue
1238             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1239             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1240
1241         # Extract the source
1242         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1243         (result, output) = commands.getstatusoutput(cmd)
1244         if (result != 0):
1245             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1246             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1247             return
1248
1249         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1250             return
1251
1252         # Get the upstream version
1253         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1254         if re_strip_revision.search(upstr_version):
1255             upstr_version = re_strip_revision.sub('', upstr_version)
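        # For example (assuming re_no_epoch / re_strip_revision behave as
        # their names suggest):
        #
        #   "1:2.4-3" --re_no_epoch--> "2.4-3" --re_strip_revision--> "2.4"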
1256
1257         # Ensure the changelog file exists
1258         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1259         if not os.path.exists(changelog_filename):
1260             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1261             return
1262
1263         # Parse the changelog
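        # (Hedged sketch: re_changelog_versions is defined in daklib/regexes.py
        # and is assumed to match changelog stanza headers, i.e. lines roughly
        # of the form "package (version) distribution; urgency=...", e.g.
        #
        #   hello (2.4-3) unstable; urgency=low
        #
        # so "bts changelog" below ends up holding one header line per version.)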
1264         self.pkg.dsc["bts changelog"] = ""
1265         changelog_file = utils.open_file(changelog_filename)
1266         for line in changelog_file.readlines():
1267             m = re_changelog_versions.match(line)
1268             if m:
1269                 self.pkg.dsc["bts changelog"] += line
1270         changelog_file.close()
1271
1272         # Check we found at least one revision in the changelog
1273         if not self.pkg.dsc["bts changelog"]:
1274             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1275
1276     def check_source(self):
1277         # Bail out if:
1278         #    a) there's no source
1279         if not self.pkg.changes["architecture"].has_key("source"):
1280             return
1281
1282         tmpdir = utils.temp_dirname()
1283
1284         # Move into the temporary directory
1285         cwd = os.getcwd()
1286         os.chdir(tmpdir)
1287
1288         # Get the changelog version history
1289         self.get_changelog_versions(cwd)
1290
1291         # Move back and cleanup the temporary tree
1292         os.chdir(cwd)
1293
1294         try:
1295             shutil.rmtree(tmpdir)
1296         except OSError, e:
1297             if e.errno != errno.EACCES:
1298                 print "E: couldn't remove temporary source tree %s" % (tmpdir)
1299                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1300
1301             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1302             # We probably have u-r or u-w directories so chmod everything
1303             # and try again.
1304             cmd = "chmod -R u+rwx %s" % (tmpdir)
1305             result = os.system(cmd)
1306             if result != 0:
1307                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1308             shutil.rmtree(tmpdir)
1309         except Exception, e:
1310             print "unexpected error during source tree removal (%s)" % e
1311             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1312
1313     ###########################################################################
1314     def ensure_hashes(self):
1315         # Make sure we recognise the format of the Files: field in the .changes
1316         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1317         if len(format) == 2:
1318             format = int(format[0]), int(format[1])
1319         else:
1320             format = int(float(format[0])), 0
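        # e.g. "Format: 1.8" yields the tuple (1, 8), while a bare "2" takes
        # the fallback branch above and becomes (2, 0).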
1321
1322         # We need to deal with the original changes blob, as the fields we need
1323         # might not be in the changes dict serialised into the .dak anymore.
1324         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1325
1326         # Copy the checksums over to the current changes dict.  This will keep
1327         # the existing modifications to it intact.
1328         for field in orig_changes:
1329             if field.startswith('checksums-'):
1330                 self.pkg.changes[field] = orig_changes[field]
1331
1332         # Check for unsupported hashes
1333         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1334             self.rejects.append(j)
1335
1336         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1337             self.rejects.append(j)
1338
1339         # We have to calculate the hash ourselves if the changes format predates
1340         # the hash's introduction, rather than requiring it in the changes file
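        # (Hedged sketch: utils.known_hashes is assumed to be a list of
        # (name, function, introduced-in-version) tuples, along the lines of
        # ("sha1", apt_pkg.sha1sum, (1, 8)), so a hash is only demanded from
        # the changes format version that introduced it onwards.)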
1341         for hashname, hashfunc, version in utils.known_hashes:
1342             # TODO: Move _ensure_changes_hash into this class
1343             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1344                 self.rejects.append(j)
1345             if "source" in self.pkg.changes["architecture"]:
1346                 # TODO: Move _ensure_dsc_hash into this class
1347                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1348                     self.rejects.append(j)
1349
1350     def check_hashes(self):
1351         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1352             self.rejects.append(m)
1353
1354         for m in utils.check_size(".changes", self.pkg.files):
1355             self.rejects.append(m)
1356
1357         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1358             self.rejects.append(m)
1359
1360         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1361             self.rejects.append(m)
1362
1363         self.ensure_hashes()
1364
1365     ###########################################################################
1366
1367     def ensure_orig(self, target_dir='.', session=None):
1368         """
1369         Ensures that all orig files mentioned in the .dsc are present
1370         in target_dir. If they do not exist, they are symlinked into place.
1371
1372         A list containing the symlinks that were created is returned (so they
1373         can be removed).
1374         """
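        # Usage sketch -- this is how check_lintian below drives it:
        #
        #   symlinked = self.ensure_orig()
        #   try:
        #       ... run checks that need the orig tarballs ...
        #   finally:
        #       for link in symlinked:
        #           os.unlink(link)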
1375
1376         symlinked = []
1377         cnf = Config()
1378
1379         for filename, entry in self.pkg.dsc_files.iteritems():
1380             if not re_is_orig_source.match(filename):
1381                 # File is not an orig; ignore
1382                 continue
1383
1384             if os.path.exists(filename):
1385                 # File exists, no need to continue
1386                 continue
1387
1388             def symlink_if_valid(path):
1389                 f = utils.open_file(path)
1390                 md5sum = apt_pkg.md5sum(f)
1391                 f.close()
1392
1393                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1394                 expected = (int(entry['size']), entry['md5sum'])
1395
1396                 if fingerprint != expected:
1397                     return False
1398
1399                 dest = os.path.join(target_dir, filename)
1400
1401                 os.symlink(path, dest)
1402                 symlinked.append(dest)
1403
1404                 return True
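            # (The (size, md5sum) pair above acts as a cheap fingerprint: a
            # candidate file is only symlinked into place when both values
            # match what the .dsc declared for this orig file.)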
1405
1406             session_ = session
1407             if session is None:
1408                 session_ = DBConn().session()
1409
1410             found = False
1411
1412             # Look in the pool
1413             for poolfile in get_poolfile_like_name(filename, session_):
1414                 poolfile_path = os.path.join(
1415                     poolfile.location.path, poolfile.filename
1416                 )
1417
1418                 if symlink_if_valid(poolfile_path):
1419                     found = True
1420                     break
1421
1422             if session is None:
1423                 session_.close()
1424
1425             if found:
1426                 continue
1427
1428             # Look in some other queues for the file
1429             queues = ('New', 'Byhand', 'ProposedUpdates',
1430                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1431
1432             for queue in queues:
1433                 if not cnf.get('Dir::Queue::%s' % queue):
1434                     continue
1435
1436                 queuefile_path = os.path.join(
1437                     cnf['Dir::Queue::%s' % queue], filename
1438                 )
1439
1440                 if not os.path.exists(queuefile_path):
1441                     # Does not exist in this queue
1442                     continue
1443
1444                 if symlink_if_valid(queuefile_path):
1445                     break
1446
1447         return symlinked
1448
1449     ###########################################################################
1450
1451     def check_lintian(self):
1452         """
1453         Extends self.rejects by checking the output of lintian against tags
1454         specified in Dinstall::LintianTags.
1455         """
1456
1457         cnf = Config()
1458
1459         # Don't reject binary uploads
1460         if not self.pkg.changes['architecture'].has_key('source'):
1461             return
1462
1463         # Only check some distributions
1464         for dist in ('unstable', 'experimental'):
1465             if dist in self.pkg.changes['distribution']:
1466                 break
1467         else:
1468             return
1469
1470         # If we do not have a tagfile, don't do anything
1471         tagfile = cnf.get("Dinstall::LintianTags")
1472         if tagfile is None:
1473             return
1474
1475         # Parse the yaml file
1476         sourcefile = file(tagfile, 'r')
1477         sourcecontent = sourcefile.read()
1478         sourcefile.close()
1479
1480         try:
1481             lintiantags = yaml.load(sourcecontent)['lintian']
1482         except yaml.YAMLError, msg:
1483             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1484             return
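
        # The tag file is YAML with a single "lintian" key; a hedged example
        # of the expected layout (actual tag names are whatever ftpmaster
        # configured):
        #
        #   lintian:
        #     nonfatal:
        #       - some-overridable-tag
        #     fatal:
        #       - some-always-reject-tag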
1485
1486         # Try and find all orig mentioned in the .dsc
1487         symlinked = self.ensure_orig()
1488
1489         # Setup the input file for lintian
1490         fd, temp_filename = utils.temp_filename()
1491         temptagfile = os.fdopen(fd, 'w')
1492         for tags in lintiantags.values():
1493             temptagfile.writelines(['%s\n' % x for x in tags])
1494         temptagfile.close()
1495
1496         try:
1497             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1498                 (temp_filename, self.pkg.changes_file)
1499
1500             result, output = commands.getstatusoutput(cmd)
1501         finally:
1502             # Remove our tempfile and any symlinks we created
1503             os.unlink(temp_filename)
1504
1505             for symlink in symlinked:
1506                 os.unlink(symlink)
1507
1508         if result == 2:
1509             utils.warn("lintian failed for %s [return code: %s]." % \
1510                 (self.pkg.changes_file, result))
1511             utils.warn(utils.prefix_multi_line_string(output, \
1512                 " [possible output:] "))
1513
1514         def log(*txt):
1515             if self.logger:
1516                 self.logger.log(
1517                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1518                 )
1519
1520         # Generate messages
1521         parsed_tags = parse_lintian_output(output)
1522         self.rejects.extend(
1523             generate_reject_messages(parsed_tags, lintiantags, log=log)
1524         )
1525
1526     ###########################################################################
1527     def check_urgency(self):
1528         cnf = Config()
1529         if self.pkg.changes["architecture"].has_key("source"):
1530             if not self.pkg.changes.has_key("urgency"):
1531                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1532             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1533             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1534                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1535                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1536                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1537
1538     ###########################################################################
1539
1540     # Sanity check the time stamps of files inside debs.
1541     # [Files in the near future cause ugly warnings and extreme time
1542     #  travel can cause errors on extraction]
1543
1544     def check_timestamps(self):
1545         Cnf = Config()
1546
1547         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1548         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1549         tar = TarTime(future_cutoff, past_cutoff)
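        # e.g. with Dinstall::PastCutoffYear set to "1975", past_cutoff is the
        # epoch value of 1975-01-01 00:00 local time:
        #
        #   >>> time.mktime(time.strptime("1975", "%Y"))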
1550
1551         for filename, entry in self.pkg.files.items():
1552             if entry["type"] == "deb":
1553                 tar.reset()
1554                 try:
1555                     deb_file = utils.open_file(filename)
1556                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1557                     deb_file.seek(0)
1558                     try:
1559                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1560                     except SystemError, e:
1561                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1562                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1563                             raise
1564                         deb_file.seek(0)
1565                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1566
1567                     deb_file.close()
1568
1569                     future_files = tar.future_files.keys()
1570                     if future_files:
1571                         num_future_files = len(future_files)
1572                         future_file = future_files[0]
1573                         future_date = tar.future_files[future_file]
1574                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1575                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1576
1577                     ancient_files = tar.ancient_files.keys()
1578                     if ancient_files:
1579                         num_ancient_files = len(ancient_files)
1580                         ancient_file = ancient_files[0]
1581                         ancient_date = tar.ancient_files[ancient_file]
1582                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1583                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1584                 except:
1585                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1586
1587     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1588         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1589             sponsored = False
1590         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1591             sponsored = False
1592             if uid_name == "":
1593                 sponsored = True
1594         else:
1595             sponsored = True
1596             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1597                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1598                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1599                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1600                     self.pkg.changes["sponsoremail"] = uid_email
1601
1602         return sponsored
1603
1604
1605     ###########################################################################
1606     # check_signed_by_key checks
1607     ###########################################################################
1608
1609     def check_signed_by_key(self):
1610         """Ensure the .changes is signed by an authorized uploader."""
1611         session = DBConn().session()
1612
1613         # First of all we check that the person has proper upload permissions
1614         # and that this upload isn't blocked
1615         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1616
1617         if fpr is None:
1618             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1619             return
1620
1621         # TODO: Check that import-keyring adds UIDs properly
1622         if not fpr.uid:
1623             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1624             return
1625
1626         # Check that the fingerprint which uploaded has permission to do so
1627         self.check_upload_permissions(fpr, session)
1628
1629         # Check that this package is not in a transition
1630         self.check_transition(session)
1631
1632         session.close()
1633
1634
1635     def check_upload_permissions(self, fpr, session):
1636         # Check any one-off upload blocks
1637         self.check_upload_blocks(fpr, session)
1638
1639         # DM is a special case unfortunately, so we check it first
1641         # (keys with no source access get more access than DMs in one
1642         #  way; DMs can only upload for their packages whether source
1643         #  or binary, whereas keys with no access might be able to
1644         #  upload some binaries)
1645         if fpr.source_acl.access_level == 'dm':
1646             self.check_dm_upload(fpr, session)
1647         else:
1648             # Check source-based permissions for other types
1649             if self.pkg.changes["architecture"].has_key("source") and \
1650                 fpr.source_acl.access_level is None:
1651                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1652                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1653                 self.rejects.append(rej)
1654                 return
1655             # If not a DM, we allow full upload rights
1656             uid_email = "%s@debian.org" % (fpr.uid.uid)
1657             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1658
1659
1660         # Check binary upload permissions
1661         # By this point we know that DMs can't have got here unless they
1662         # are allowed to deal with the package concerned so just apply
1663         # normal checks
1664         if fpr.binary_acl.access_level == 'full':
1665             return
1666
1667         # Otherwise we're in the map case
1668         tmparches = self.pkg.changes["architecture"].copy()
1669         tmparches.pop('source', None)
1670
1671         for bam in fpr.binary_acl_map:
1672             tmparches.pop(bam.architecture.arch_string, None)
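
        # e.g. an upload for {"source", "amd64", "i386"} signed by a key whose
        # binary_acl_map only grants amd64 leaves {"i386"} behind, which is
        # rejected (or, eventually, thrown away) below.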
1673
1674         if len(tmparches.keys()) > 0:
1675             if fpr.binary_reject:
1676                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1677                 rej += "\narchitectures involved are: %s" % (",".join(tmparches.keys()))
1678                 self.rejects.append(rej)
1679             else:
1680                 # TODO: This is where we'll implement reject vs throw away binaries later
1681                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1682                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1683                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1684                 self.rejects.append(rej)
1685
1686
1687     def check_upload_blocks(self, fpr, session):
1688         """Check whether any upload blocks apply to this source, source
1689            version, uid / fpr combination"""
1690
1691         def block_rej_template(fb):
1692             rej = 'Manual upload block in place for package %s' % fb.source
1693             if fb.version is not None:
1694                 rej += ', version %s' % fb.version
1695             return rej
1696
1697         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1698             # version is None if the block applies to all versions
1699             if fb.version is None or fb.version == self.pkg.changes['version']:
1700                 # Check both fpr and uid - either is enough to cause a reject
1701                 if fb.fpr is not None:
1702                     if fb.fpr.fingerprint == fpr.fingerprint:
1703                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1704                 if fb.uid is not None:
1705                     if fb.uid == fpr.uid:
1706                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1707
1708
1709     def check_dm_upload(self, fpr, session):
1710         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1711         ## none of the uploaded packages are NEW
1712         rej = False
1713         for f in self.pkg.files.keys():
1714             if self.pkg.files[f].has_key("byhand"):
1715                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1716                 rej = True
1717             if self.pkg.files[f].has_key("new"):
1718                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1719                 rej = True
1720
1721         if rej:
1722             return
1723
1724         r = get_newest_source(self.pkg.changes["source"], session)
1725
1726         if r is None:
1727             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1728             self.rejects.append(rej)
1729             return
1730
1731         if not r.dm_upload_allowed:
1732             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1733             self.rejects.append(rej)
1734             return
1735
1736         ## the Maintainer: field of the uploaded .changes file corresponds with
1737         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1738         ## uploads)
1739         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1740             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1741
1742         ## the most recent version of the package uploaded to unstable or
1743         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1744         ## non-developer maintainers cannot NMU or hijack packages)
1745
1746         # srcuploaders includes the maintainer
1747         accept = False
1748         for sup in r.srcuploaders:
1749             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1750             # Eww - I hope we never have two people with the same name in Debian
1751             if email == fpr.uid.uid or name == fpr.uid.name:
1752                 accept = True
1753                 break
1754
1755         if not accept:
1756             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1757             return
1758
1759         ## none of the packages are being taken over from other source packages
1760         for b in self.pkg.changes["binary"].keys():
1761             for suite in self.pkg.changes["distribution"].keys():
1762                 for s in get_source_by_package_and_suite(b, suite, session):
1763                     if s.source != self.pkg.changes["source"]:
1764                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1765
1766
1767
1768     def check_transition(self, session):
1769         cnf = Config()
1770
1771         sourcepkg = self.pkg.changes["source"]
1772
1773         # No sourceful upload -> no need to do anything else, direct return
1774         # We also only check uploads to unstable, not experimental or those going
1775         # to some proposed-updates queue
1776         if "source" not in self.pkg.changes["architecture"] or \
1777            "unstable" not in self.pkg.changes["distribution"]:
1778             return
1779
1780         # Also only check if a transitions file is defined (and actually
1781         # exists).
1782         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1783         if transpath == "" or not os.path.exists(transpath):
1784             return
1785
1786         # Parse the yaml file
1787         sourcefile = file(transpath, 'r')
1788         sourcecontent = sourcefile.read()
1789         try:
1790             transitions = yaml.load(sourcecontent)
1791         except yaml.YAMLError, msg:
1792             # This shouldn't happen, there is a wrapper to edit the file which
1793             # checks it, but we would rather be safe than end up rejecting
1794             # everything.
1795             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1796             return
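
        # A transitions file is expected to look roughly like this (hedged
        # example; the keys match what is read below):
        #
        #   ocaml:
        #     reason: "waiting for the ocaml transition to finish"
        #     source: ocaml
        #     new: 3.11.1
        #     rm: Some Releaseteam Member
        #     packages:
        #       - ocaml
        #       - camlzip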
1797
1798         # Now look through all defined transitions
1799         for trans in transitions:
1800             t = transitions[trans]
1801             source = t["source"]
1802             expected = t["new"]
1803
1804             # Will be None if nothing is in testing.
1805             current = get_source_in_suite(source, "testing", session)
1806             if current is not None:
1807                 compare = apt_pkg.VersionCompare(current.version, expected)
1808
1809             if current is None or compare < 0:
1810                 # This is still valid, the current version in testing is older than
1811                 # the new version we wait for, or there is none in testing yet
1812
1813                 # Check if the source we look at is affected by this.
1814                 if sourcepkg in t['packages']:
1815                     # The source is affected, lets reject it.
1816
1817                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1818                         sourcepkg, trans)
1819
1820                     if current is not None:
1821                         currentlymsg = "at version %s" % (current.version)
1822                     else:
1823                         currentlymsg = "not present in testing"
1824
1825                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1826
1827                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1828 is part of a testing transition designed to get %s migrated (it is
1829 currently %s, we need version %s).  This transition is managed by the
1830 Release Team, and %s is the Release-Team member responsible for it.
1831 Please mail debian-release@lists.debian.org or contact %s directly if you
1832 need further assistance.  You might want to upload to experimental until this
1833 transition is done."""
1834                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1835
1836                     self.rejects.append(rejectmsg)
1837                     return
1838
1839     ###########################################################################
1840     # End check_signed_by_key checks
1841     ###########################################################################
1842
1843     def build_summaries(self):
1844         """ Build a summary of changes the upload introduces. """
1845
1846         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1847
1848         short_summary = summary
1849
1850         # This is for direport's benefit...
1851         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1852
1853         if byhand or new:
1854             summary += "Changes: " + f
1855
1856         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1857
1858         summary += self.announce(short_summary, 0)
1859
1860         return (summary, short_summary)
1861
1862     ###########################################################################
1863
1864     def close_bugs(self, summary, action):
1865         """
1866         Send mail to close bugs as instructed by the closes field in the changes file.
1867         Also add a line to summary if any work was done.
1868
1869         @type summary: string
1870         @param summary: summary text, as given by L{build_summaries}
1871
1872         @type action: bool
1873         @param action: If set to false, no real action will be done.
1874
1875         @rtype: string
1876         @return: summary. If action was taken, extended by the list of closed bugs.
1877
1878         """
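
        # (Hedged sketch: utils.TemplateSubst is assumed to substitute __KEY__
        # placeholders from the Subst map into the template, so with
        # Subst["__BUG_NUMBER__"] = "123456" a template line
        # "Closes: #__BUG_NUMBER__" would become "Closes: #123456".)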
1879
1880         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1881
1882         bugs = self.pkg.changes["closes"].keys()
1883
1884         if not bugs:
1885             return summary
1886
1887         bugs.sort()
1888         summary += "Closing bugs: "
1889         for bug in bugs:
1890             summary += "%s " % (bug)
1891             if action:
1892                 self.update_subst()
1893                 self.Subst["__BUG_NUMBER__"] = bug
1894                 if self.pkg.changes["distribution"].has_key("stable"):
1895                     self.Subst["__STABLE_WARNING__"] = """
1896 Note that this package is not part of the released stable Debian
1897 distribution.  It may have dependencies on other unreleased software,
1898 or other instabilities.  Please take care if you wish to install it.
1899 The update will eventually make its way into the next released Debian
1900 distribution."""
1901                 else:
1902                     self.Subst["__STABLE_WARNING__"] = ""
1903                 mail_message = utils.TemplateSubst(self.Subst, template)
1904                 utils.send_mail(mail_message)
1905
1906                 # Clear up after ourselves
1907                 del self.Subst["__BUG_NUMBER__"]
1908                 del self.Subst["__STABLE_WARNING__"]
1909
1910         if action and self.logger:
1911             self.logger.log(["closing bugs"] + bugs)
1912
1913         summary += "\n"
1914
1915         return summary
1916
1917     ###########################################################################
1918
1919     def announce(self, short_summary, action):
1920         """
1921         Send an announce mail about a new upload.
1922
1923         @type short_summary: string
1924         @param short_summary: Short summary text to include in the mail
1925
1926         @type action: bool
1927         @param action: If set to false, no real action will be done.
1928
1929         @rtype: string
1930         @return: Text string describing the action taken.
1931
1932         """
1933
1934         cnf = Config()
1935         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1936
1937         # Only do announcements for source uploads with a recent dpkg-dev installed
1938         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1939            self.pkg.changes["architecture"].has_key("source"):
1940             return ""
1941
1942         lists_done = {}
1943         summary = ""
1944
1945         self.Subst["__SHORT_SUMMARY__"] = short_summary
1946
1947         for dist in self.pkg.changes["distribution"].keys():
1948             suite = get_suite(dist)
1949             if suite is None: continue
1950             announce_list = suite.announce
1951             if announce_list == "" or lists_done.has_key(announce_list):
1952                 continue
1953
1954             lists_done[announce_list] = 1
1955             summary += "Announcing to %s\n" % (announce_list)
1956
1957             if action:
1958                 self.update_subst()
1959                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1960                 if cnf.get("Dinstall::TrackingServer") and \
1961                    self.pkg.changes["architecture"].has_key("source"):
1962                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1963                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1964
1965                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1966                 utils.send_mail(mail_message)
1967
1968                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1969
1970         if cnf.FindB("Dinstall::CloseBugs"):
1971             summary = self.close_bugs(summary, action)
1972
1973         del self.Subst["__SHORT_SUMMARY__"]
1974
1975         return summary
1976
1977     ###########################################################################
1978     @session_wrapper
1979     def accept (self, summary, short_summary, session=None):
1980         """
1981         Accept an upload.
1982
1983         This moves all files referenced from the .changes into the pool,
1984         sends the accepted mail, announces to lists, closes bugs and
1985         also checks for override disparities. If enabled it will write out
1986         the version history for the BTS Version Tracking and will finally call
1987         L{queue_build}.
1988
1989         @type summary: string
1990         @param summary: Summary text
1991
1992         @type short_summary: string
1993         @param short_summary: Short summary
1994         """
1995
1996         cnf = Config()
1997         stats = SummaryStats()
1998
1999         print "Installing."
2000         self.logger.log(["installing changes", self.pkg.changes_file])
2001
2002         poolfiles = []
2003
2004         # Add the .dsc file to the DB first
2005         for newfile, entry in self.pkg.files.items():
2006             if entry["type"] == "dsc":
2007                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2008                 for j in pfs:
2009                     poolfiles.append(j)
2010
2011         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2012         for newfile, entry in self.pkg.files.items():
2013             if entry["type"] == "deb":
2014                 poolfiles.append(add_deb_to_db(self, newfile, session))
2015
2016         # If this is a sourceful, diff-only upload that is moving
2017         # cross-component, we need to copy the .orig files into the new
2018         # component too, for the same reasons as above.
2019         # XXX: mhy: I think this should be in add_dsc_to_db
2020         if self.pkg.changes["architecture"].has_key("source"):
2021             for orig_file in self.pkg.orig_files.keys():
2022                 if not self.pkg.orig_files[orig_file].has_key("id"):
2023                     continue # Skip if it's not in the pool
2024                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2025                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2026                     continue # Skip if the location didn't change
2027
2028                 # Do the move
2029                 oldf = get_poolfile_by_id(orig_file_id, session)
2030                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2031                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2032                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2033
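                # (Hedged sketch of utils.poolify: it maps (source, component)
                # to the pool subdirectory, e.g. ("hello", "main") to
                # "main/h/hello/" and library packages to a four-character
                # prefix such as "main/libf/libfoo/".)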
2034                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2035
2036                 # TODO: Care about size/md5sum collisions etc
2037                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2038
2039                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2040                 if newf is None:
2041                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2042                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2043
2044                     session.flush()
2045
2046                     # Don't reference the old file from this changes
2047                     for p in poolfiles:
2048                         if p.file_id == oldf.file_id:
2049                             poolfiles.remove(p)
2050
2051                     poolfiles.append(newf)
2052
2053                     # Fix up the DSC references
2054                     toremove = []
2055
2056                     for df in source.srcfiles:
2057                         if df.poolfile.file_id == oldf.file_id:
2058                             # Add a new DSC entry and mark the old one for deletion
2059                             # Don't do it in the loop so we don't change the thing we're iterating over
2060                             newdscf = DSCFile()
2061                             newdscf.source_id = source.source_id
2062                             newdscf.poolfile_id = newf.file_id
2063                             session.add(newdscf)
2064
2065                             toremove.append(df)
2066
2067                     for df in toremove:
2068                         session.delete(df)
2069
2070                     # Flush our changes
2071                     session.flush()
2072
2073                     # Make sure that our source object is up-to-date
2074                     session.expire(source)
2075
2076         # Add changelog information to the database
2077         self.store_changelog()
2078
2079         # Install the files into the pool
2080         for newfile, entry in self.pkg.files.items():
2081             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2082             utils.move(newfile, destination)
2083             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2084             stats.accept_bytes += float(entry["size"])
2085
2086         # Copy the .changes file across for suites which need it.
2087         copy_changes = dict([(x.copychanges, '')
2088                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2089                              if x.copychanges is not None])
2090
2091         for dest in copy_changes.keys():
2092             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2093
2094         # We're done - commit the database changes
2095         session.commit()
2096         # Our SQL session will automatically start a new transaction after
2097         # the last commit
2098
2099         # Move the .changes into the 'done' directory
2100         utils.move(self.pkg.changes_file,
2101                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2102
2103         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2104             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2105
2106         self.update_subst()
2107         self.Subst["__SUMMARY__"] = summary
2108         mail_message = utils.TemplateSubst(self.Subst,
2109                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2110         utils.send_mail(mail_message)
2111         self.announce(short_summary, 1)
2112
2113         ## Helper stuff for DebBugs Version Tracking
2114         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2115             if self.pkg.changes["architecture"].has_key("source"):
2116                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2117                 version_history = os.fdopen(fd, 'w')
2118                 version_history.write(self.pkg.dsc["bts changelog"])
2119                 version_history.close()
2120                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2121                                       self.pkg.changes_file[:-8]+".versions")
2122                 os.rename(temp_filename, filename)
2123                 os.chmod(filename, 0644)
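
                # (e.g. "foo_1.2-1_amd64.changes" yields a BTS version-track
                # file "foo_1.2-1_amd64.versions" in that directory.)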
2124
2125             # Write out the binary -> source mapping.
2126             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2127             debinfo = os.fdopen(fd, 'w')
2128             for name, entry in sorted(self.pkg.files.items()):
2129                 if entry["type"] == "deb":
2130                     line = " ".join([entry["package"], entry["version"],
2131                                      entry["architecture"], entry["source package"],
2132                                      entry["source version"]])
2133                     debinfo.write(line+"\n")
2134             debinfo.close()
2135             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2136                                   self.pkg.changes_file[:-8]+".debinfo")
2137             os.rename(temp_filename, filename)
2138             os.chmod(filename, 0644)
2139
2140         session.commit()
2141
2142         # Set up our copy queues (e.g. buildd queues)
2143         for suite_name in self.pkg.changes["distribution"].keys():
2144             suite = get_suite(suite_name, session)
2145             for q in suite.copy_queues:
2146                 for f in poolfiles:
2147                     q.add_file_from_pool(f)
2148
2149         session.commit()
2150
2151         # Finally...
2152         stats.accept_count += 1
2153
2154     def check_override(self):
2155         """
2156         Checks override entries for validity. Mails "Override disparity" warnings,
2157         if that feature is enabled.
2158
2159         Abandons the check if
2160           - override disparity checks are disabled
2161           - mail sending is disabled
2162         """
2163
2164         cnf = Config()
2165
2166         # Abandon the check if override disparity checks have been disabled
2167         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2168             return
2169
2170         summary = self.pkg.check_override()
2171
2172         if summary == "":
2173             return
2174
2175         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2176
2177         self.update_subst()
2178         self.Subst["__SUMMARY__"] = summary
2179         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2180         utils.send_mail(mail_message)
2181         del self.Subst["__SUMMARY__"]
2182
2183     ###########################################################################
2184
2185     def remove(self, from_dir=None):
2186         """
2187         Used (for instance) in p-u to remove the package from unchecked
2188
2189         Also removes the package from holding area.
2190         """
2191         if from_dir is None:
2192             from_dir = self.pkg.directory
2193         h = Holding()
2194
2195         for f in self.pkg.files.keys():
2196             os.unlink(os.path.join(from_dir, f))
2197             if os.path.exists(os.path.join(h.holding_dir, f)):
2198                 os.unlink(os.path.join(h.holding_dir, f))
2199
2200         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2201         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2202             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2203
2204     ###########################################################################
2205
2206     def move_to_queue (self, queue):
2207         """
2208         Move files to a destination queue using the permissions in the table
2209         """
2210         h = Holding()
2211         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2212                    queue.path, perms=int(queue.change_perms, 8))
2213         for f in self.pkg.files.keys():
2214             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2215
2216     ###########################################################################
2217
2218     def force_reject(self, reject_files):
2219         """
2220         Forcefully move files from the current directory to the
2221         reject directory.  If any file already exists in the reject
2222         directory it will be moved to the morgue to make way for
2223         the new file.
2224
2225         @type reject_files: dict
2226         @param reject_files: file dictionary
2227
2228         """
2229
2230         cnf = Config()
2231
2232         for file_entry in reject_files:
2233             # Skip any files which don't exist or which we don't have permission to copy.
2234             if not os.access(file_entry, os.R_OK):
2235                 continue
2236
2237             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2238
2239             try:
2240                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2241             except OSError, e:
2242                 # File exists?  Let's find a new name by adding a number
2243                 if e.errno == errno.EEXIST:
2244                     try:
2245                         dest_file = utils.find_next_free(dest_file, 255)
2246                     except NoFreeFilenameError:
2247                         # Something's either gone badly Pete Tong, or
2248                         # someone is trying to exploit us.
2249                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2250                         return
2251
2252                     # Make sure we really got it
2253                     try:
2254                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2255                     except OSError, e:
2256                         # Likewise
2257                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2258                         return
2259                 else:
2260                     raise
2261             # If we got here, we own the destination file, so we can
2262             # safely overwrite it.
2263             utils.move(file_entry, dest_file, 1, perms=0660)
2264             os.close(dest_fd)
2265
2266     ###########################################################################
2267     def do_reject (self, manual=0, reject_message="", notes=""):
2268         """
2269         Reject an upload. If C{manual} is true and no reject message was
2270         given, spawn an editor so the user can write one.
2271
2272         @type manual: bool
2273         @param manual: manual or automated rejection
2274
2275         @type reject_message: string
2276         @param reject_message: A reject message
2277
2278         @return: 0 on success, 1 if the rejection was abandoned.
2279
2280         """
2281         # If we weren't given a manual rejection message, spawn an
2282         # editor so the user can add one in...
2283         if manual and not reject_message:
2284             (fd, temp_filename) = utils.temp_filename()
2285             temp_file = os.fdopen(fd, 'w')
2286             if len(notes) > 0:
2287                 for note in notes:
2288                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2289                                     % (note.author, note.version, note.notedate, note.comment))
2290             temp_file.close()
2291             editor = os.environ.get("EDITOR","vi")
2292             answer = 'E'
2293             while answer == 'E':
2294                 os.system("%s %s" % (editor, temp_filename))
2295                 temp_fh = utils.open_file(temp_filename)
2296                 reject_message = "".join(temp_fh.readlines())
2297                 temp_fh.close()
2298                 print "Reject message:"
2299                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2300                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2301                 answer = "XXX"
2302                 while prompt.find(answer) == -1:
2303                     answer = utils.our_raw_input(prompt)
2304                     m = re_default_answer.search(prompt)
2305                     if answer == "":
2306                         answer = m.group(1)
2307                     answer = answer[:1].upper()
2308             os.unlink(temp_filename)
2309             if answer == 'A':
2310                 return 1
2311             elif answer == 'Q':
2312                 sys.exit(0)
2313
2314         print "Rejecting.\n"
2315
2316         cnf = Config()
2317
2318         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2319         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2320
2321         # Move all the files into the reject directory
2322         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2323         self.force_reject(reject_files)
2324
2325         # If we fail here someone is probably trying to exploit the race
2326         # so let's just raise an exception ...
2327         if os.path.exists(reason_filename):
2328             os.unlink(reason_filename)
2329         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2330
2331         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2332
2333         self.update_subst()
2334         if not manual:
2335             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2336             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2337             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2338             os.write(reason_fd, reject_message)
2339             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2340         else:
2341             # Build up the rejection email
2342             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2343             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2344             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2345             self.Subst["__REJECT_MESSAGE__"] = ""
2346             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2347             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2348             # Write the rejection email out as the <foo>.reason file
2349             os.write(reason_fd, reject_mail_message)
2350
2351         del self.Subst["__REJECTOR_ADDRESS__"]
2352         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2353         del self.Subst["__CC__"]
2354
2355         os.close(reason_fd)
2356
2357         # Send the rejection mail
2358         utils.send_mail(reject_mail_message)
2359
2360         if self.logger:
2361             self.logger.log(["rejected", self.pkg.changes_file])
2362
2363         return 0
2364
2365     ################################################################################
2366     def in_override_p(self, package, component, suite, binary_type, filename, session):
2367         """
2368         Check if a package already has override entries in the DB
2369
2370         @type package: string
2371         @param package: package name
2372
2373         @type component: string
2374         @param component: database id of the component
2375
2376         @type suite: int
2377         @param suite: database id of the suite
2378
2379         @type binary_type: string
2380         @param binary_type: type of the package
2381
2382         @type filename: string
2383         @param filename: filename we check
2384
2385         @return: the database result. But no one cares anyway.
2386
2387         """
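
        # Hedged usage sketch -- callers in this module pass the values from a
        # files entry, along the lines of:
        #
        #   override = self.in_override_p(entry["package"], entry["component"],
        #                                 suite, entry.get("dbtype", ""),
        #                                 filename, session)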
2388
2389         cnf = Config()
2390
2391         if binary_type == "": # must be source
2392             file_type = "dsc"
2393         else:
2394             file_type = binary_type
2395
2396         # Override suite name; used for example with proposed-updates
2397         oldsuite = get_suite(suite, session)
2398         if oldsuite is not None and oldsuite.overridesuite:
2399             suite = oldsuite.overridesuite
2400
2401         result = get_override(package, suite, component, file_type, session)
2402
2403         # If checking for a source package, fall back on the binary override type
2404         if file_type == "dsc" and len(result) < 1:
2405             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2406
2407         # Remember the section and priority so we can check them later if appropriate
2408         if len(result) > 0:
2409             result = result[0]
2410             self.pkg.files[filename]["override section"] = result.section.section
2411             self.pkg.files[filename]["override priority"] = result.priority.priority
2412             return result
2413
2414         return None
2415
2416     ################################################################################
2417     def get_anyversion(self, sv_list, suite):
2418         """
2419         @type sv_list: list
2420         @param sv_list: list of (suite, version) tuples to check
2421
2422         @type suite: string
2423         @param suite: suite name
2424
2425         Returns the highest version in C{sv_list} present in C{suite} or in any suite it enhances (per Suite::*::VersionChecks::Enhances).
2426         """
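
        # e.g. sv_list = [("unstable", "1.0-1"), ("experimental", "1.1-1")]
        # with suite "unstable" returns "1.0-1"; if unstable's VersionChecks
        # listed experimental under Enhances, it would return "1.1-1" instead.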
2427         Cnf = Config()
2428         anyversion = None
2429         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2430         for (s, v) in sv_list:
2431             if s in [ x.lower() for x in anysuite ]:
2432                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2433                     anyversion = v
2434
2435         return anyversion
2436
2437     ################################################################################
2438
2439     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2440         """
2441         @type sv_list: list
2442         @param sv_list: list of (suite, version) tuples to check
2443
2444         @type filename: string
2445         @param filename: filename being checked (used only in messages)
2446
2447         @type new_version: string
2448         @param new_version: version of the incoming package
2449
2450         Ensure versions are newer than existing packages in target
2451         suites and that cross-suite version checking rules as
2452         set out in the conf file are satisfied.
2453         """
2454
2455         cnf = Config()
2456
2457         # Check versions for each target suite
2458         for target_suite in self.pkg.changes["distribution"].keys():
2459             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2460             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2461
2462             # Enforce "must be newer than target suite" even if conffile omits it
2463             if target_suite not in must_be_newer_than:
2464                 must_be_newer_than.append(target_suite)
2465
2466             for (suite, existent_version) in sv_list:
2467                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
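
                # apt_pkg.VersionCompare follows cmp()-style semantics:
                # negative if new_version is older than existent_version,
                # zero if equal, positive if newer.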
2468
2469                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2470                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2471
2472                 if suite in must_be_older_than and vercmp > -1:
2473                     cansave = 0
2474
2475                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2476                         # we really use the other suite, ignoring the conflicting one ...
2477                         addsuite = self.pkg.changes["distribution-version"][suite]
2478
2479                         add_version = self.get_anyversion(sv_list, addsuite)
2480                         target_version = self.get_anyversion(sv_list, target_suite)
2481
2482                         if not add_version:
2483                             # not add_version can only happen if we map to a suite
2484                             # that doesn't enhance the suite we're propup'ing from.
2485                             # so "propup-ver x a b c; map a d" is a problem only if
2486                             # d doesn't enhance a.
2487                             #
2488                             # i think we could always propagate in this case, rather
2489                             # than complaining. either way, this isn't a REJECT issue
2490                             #
2491                             # And - we really should complain to the dorks who configured dak
2492                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2493                             self.pkg.changes.setdefault("propdistribution", {})
2494                             self.pkg.changes["propdistribution"][addsuite] = 1
2495                             cansave = 1
2496                         elif not target_version:
2497                             # not target_version is true when the package is NEW
2498                             # we could just stick with the "...old version..." REJECT
2499                             # for this, I think.
2500                             self.rejects.append("Won't propagate NEW packages.")
2501                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2502                             # propagation would be redundant. no need to reject though.
2503                             self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2504                             cansave = 1
2505                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2506                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2507                             # propagate!
2508                             self.warnings.append("Propagating upload to %s" % (addsuite))
2509                             self.pkg.changes.setdefault("propdistribution", {})
2510                             self.pkg.changes["propdistribution"][addsuite] = 1
2511                             cansave = 1
2512
2513                     if not cansave:
2514                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2515
2516     ################################################################################
2517     def check_binary_against_db(self, filename, session):
2518         # Ensure version is sane
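        # get_suite_version_by_package() returns the (suite, version) pairs
        # currently known for this package/architecture - the sv_list shape
        # cross_suite_version_check() expects.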
2519         self.cross_suite_version_check( \
2520             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2521                 self.pkg.files[filename]["architecture"], session),
2522             filename, self.pkg.files[filename]["version"], sourceful=False)
2523
2524         # Check for any existing copies of the file
2525         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2526         q = q.filter_by(version=self.pkg.files[filename]["version"])
2527         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2528
2529         if q.count() > 0:
2530             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2531
2532     ################################################################################
2533
2534     def check_source_against_db(self, filename, session):
2535         source = self.pkg.dsc.get("source")
2536         version = self.pkg.dsc.get("version")
2537
2538         # Ensure version is sane
2539         self.cross_suite_version_check( \
2540             get_suite_version_by_source(source, session), filename, version,
2541             sourceful=True)
2542
2543     ################################################################################
2544     def check_dsc_against_db(self, filename, session):
2545         """
2546
2547         @warning: NB: this function can remove entries from the 'files' index [if
2548          the orig tarball is a duplicate of the one in the archive]; if
2549          you're iterating over 'files' and call this function as part of
2550          the loop, be sure to add a check to the top of the loop to
2551          ensure you haven't just tried to dereference the deleted entry.
2552
2553         """
2554
2555         Cnf = Config()
2556         self.pkg.orig_files = {} # XXX: do we need to clear it?
2557         orig_files = self.pkg.orig_files
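        # Depending on where each file turns up below, its orig_files entry
        # gains "path", "id" and "location" keys; install() relies on these
        # later on.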
2558
2559         # Try and find all files mentioned in the .dsc.  This has
2560         # to work harder to cope with the multiple possible
2561         # locations of an .orig.tar.gz.
2562         # The ordering on the select is needed to pick the newest orig
2563         # when it exists in multiple places.
2564         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2565             found = None
2566             if self.pkg.files.has_key(dsc_name):
2567                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2568                 actual_size = int(self.pkg.files[dsc_name]["size"])
2569                 found = "%s in incoming" % (dsc_name)
2570
2571                 # Check the file does not already exist in the archive
2572                 ql = get_poolfile_like_name(dsc_name, session)
2573
2574                 # Keep only entries whose filename ends with dsc_name;
2575                 # filter with a list comprehension, since removing items
2576                 # from ql while iterating over it would skip elements.
2577                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2578
2579                 # "[dak] has not broken them.  [dak] has fixed a
2580                 # brokenness.  Your crappy hack exploited a bug in
2581                 # the old dinstall."
2582                 #
2583                 # "(Come on!  I thought it was always obvious that
2584                 # one just doesn't release different files with
2585                 # the same name and version.)"
2586                 #                        -- ajk@ on d-devel@l.d.o
2587
2588                 if len(ql) > 0:
2589                     # Ignore exact matches for .orig.tar.gz
2590                     match = 0
2591                     if re_is_orig_source.match(dsc_name):
2592                         for i in ql:
2593                             if self.pkg.files.has_key(dsc_name) and \
2594                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2595                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2596                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2597                                 # TODO: Don't delete the entry, just mark it as not needed
2598                                 # This would fix the stupidity of changing something we often iterate over
2599                                 # whilst we're doing it
2600                                 del self.pkg.files[dsc_name]
2601                                 dsc_entry["files id"] = i.file_id
2602                                 if not orig_files.has_key(dsc_name):
2603                                     orig_files[dsc_name] = {}
2604                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2605                                 match = 1
2606
2607                                 # Don't bitch that we couldn't find this file later
2608                                 try:
2609                                     self.later_check_files.remove(dsc_name)
2610                                 except ValueError:
2611                                     pass
2612
2613
2614                     if not match:
2615                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2616
2617             elif re_is_orig_source.match(dsc_name):
2618                 # Check in the pool
2619                 ql = get_poolfile_like_name(dsc_name, session)
2620
2621                 # Keep only entries whose filename ends with dsc_name;
2622                 # again, filter rather than removing mid-iteration.
2623                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2624                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2626
2627                 if len(ql) > 0:
2628                     # Unfortunately, we may get more than one match here if,
2629                     # for example, the package was in potato but had an -sa
2630                     # upload in woody.  So we need to choose the right one.
2631
2632                     # default to something sane in case we don't match any or have only one
2633                     x = ql[0]
2634
2635                     if len(ql) > 1:
2636                         for i in ql:
2637                             old_file = os.path.join(i.location.path, i.filename)
2638                             old_file_fh = utils.open_file(old_file)
2639                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2640                             old_file_fh.close()
2641                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2642                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2643                                 x = i
2644
2645                     old_file = os.path.join(x.location.path, x.filename)
2646                     old_file_fh = utils.open_file(old_file)
2647                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2648                     old_file_fh.close()
2649                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2650                     found = old_file
2651                     suite_type = x.location.archive_type
2652                     # need this for updating dsc_files in install()
2653                     dsc_entry["files id"] = x.file_id
2654                     # See install() in process-accepted...
2655                     if not orig_files.has_key(dsc_name):
2656                         orig_files[dsc_name] = {}
2657                     orig_files[dsc_name]["id"] = x.file_id
2658                     orig_files[dsc_name]["path"] = old_file
2659                     orig_files[dsc_name]["location"] = x.location.location_id
2660                 else:
2661                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2662                     # Not there? Check the queue directories...
2663                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2664                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2665                             continue
2666                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2667                         if os.path.exists(in_otherdir):
2668                             in_otherdir_fh = utils.open_file(in_otherdir)
2669                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2670                             in_otherdir_fh.close()
2671                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2672                             found = in_otherdir
2673                             if not orig_files.has_key(dsc_name):
2674                                 orig_files[dsc_name] = {}
2675                             orig_files[dsc_name]["path"] = in_otherdir
2676
2677                     if not found:
2678                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2679                         continue
2680             else:
2681                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2682                 continue
2683             if actual_md5 != dsc_entry["md5sum"]:
2684                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2685             if actual_size != int(dsc_entry["size"]):
2686                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2687
2688     ################################################################################
2689     # This is used by process-new and process-holding to recheck a changes file
2690     # at the time we're running.  It mainly wraps various other internal functions
2691     # and is similar to accepted_checks - these should probably be tidied up
2692     # and combined
2693     def recheck(self, session):
2694         cnf = Config()
2695         for f in self.pkg.files.keys():
2696             # The .orig.tar.gz can disappear out from under us if it's a
2697             # duplicate of one in the archive.
2698             if not self.pkg.files.has_key(f):
2699                 continue
2700
2701             entry = self.pkg.files[f]
2702
2703             # Check that the source still exists
2704             if entry["type"] == "deb":
2705                 source_version = entry["source version"]
2706                 source_package = entry["source package"]
2707                 if not self.pkg.changes["architecture"].has_key("source") \
2708                    and not source_exists(source_package, source_version, \
2709                     suites = self.pkg.changes["distribution"].keys(), session = session):
2710                     source_epochless_version = re_no_epoch.sub('', source_version)
2711                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
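                    # The source may not be in the archive yet but could
                    # still be waiting in one of the policy queues, so look
                    # there before rejecting.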
2712                     found = False
2713                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2714                         if cnf.has_key("Dir::Queue::%s" % (q)):
2715                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2716                                 found = True
2717                     if not found:
2718                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2719
2720             # Version and file overwrite checks
2721             if entry["type"] == "deb":
2722                 self.check_binary_against_db(f, session)
2723             elif entry["type"] == "dsc":
2724                 self.check_source_against_db(f, session)
2725                 self.check_dsc_against_db(f, session)
2726
2727     ################################################################################
2728     def accepted_checks(self, overwrite_checks, session):
2729         # Recheck anything that relies on the database; since that's not
2730         # frozen between accept and our run time when called from p-a.
2731
2732         # overwrite_checks is set to False when installing to stable/oldstable
2733
2734         propagate = {}
2735         nopropagate = {}
2736
2737         # Find the .dsc (again)
2738         dsc_filename = None
2739         for f in self.pkg.files.keys():
2740             if self.pkg.files[f]["type"] == "dsc":
2741                 dsc_filename = f
2742
2743         for checkfile in self.pkg.files.keys():
2744             # The .orig.tar.gz can disappear out from under us if it's a
2745             # duplicate of one in the archive.
2746             if not self.pkg.files.has_key(checkfile):
2747                 continue
2748
2749             entry = self.pkg.files[checkfile]
2750
2751             # Check that the source still exists
2752             if entry["type"] == "deb":
2753                 source_version = entry["source version"]
2754                 source_package = entry["source package"]
2755                 if not self.pkg.changes["architecture"].has_key("source") \
2756                    and not source_exists(source_package, source_version, \
2757                     suites = self.pkg.changes["distribution"].keys(), \
2758                     session = session):
2759                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2760
2761             # Version and file overwrite checks
2762             if overwrite_checks:
2763                 if entry["type"] == "deb":
2764                     self.check_binary_against_db(checkfile, session)
2765                 elif entry["type"] == "dsc":
2766                     self.check_source_against_db(checkfile, session)
2767                     self.check_dsc_against_db(dsc_filename, session)
2768
2769             # propagate if the package is in the override tables:
2770             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2771                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2772                     propagate[suite] = 1
2773                 else:
2774                     nopropagate[suite] = 1
2775
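        # A suite from propdistribution is only added to the distribution
        # list if every file in the upload is known to its override tables.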
2776         for suite in propagate.keys():
2777             if suite in nopropagate:
2778                 continue
2779             self.pkg.changes["distribution"][suite] = 1
2780
2781         for checkfile in self.pkg.files.keys():
                 entry = self.pkg.files[checkfile]
2782             # Check the package is still in the override tables
2783             for suite in self.pkg.changes["distribution"].keys():
2784                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2785                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2786
2787     ################################################################################
2788     # If any file of an upload has a recent mtime then chances are good
2789     # the file is still being uploaded.
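    # E.g. with Dinstall::SkipTime set to 300 (an illustrative value), any
    # file modified within the last five minutes marks the upload as still
    # in progress.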
2790
2791     def upload_too_new(self):
2792         cnf = Config()
2793         too_new = False
2794         # Move back to the original directory to get accurate time stamps
2795         cwd = os.getcwd()
2796         os.chdir(self.pkg.directory)
2797         file_list = self.pkg.files.keys()
2798         file_list.extend(self.pkg.dsc_files.keys())
2799         file_list.append(self.pkg.changes_file)
2800         for f in file_list:
2801             try:
2802                 last_modified = time.time()-os.path.getmtime(f)
2803                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2804                     too_new = True
2805                     break
2806             except OSError:
                     # the file may have vanished between listing and stat; skip it
2807                 pass
2808
2809         os.chdir(cwd)
2810         return too_new
2811
2812     def store_changelog(self):
2813
2814         # Skip binary-only upload if it is not a bin-NMU
2815         if not self.pkg.changes['architecture'].has_key('source'):
2816             from daklib.regexes import re_bin_only_nmu
2817             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2818                 return
2819
2820         session = DBConn().session()
2821
2822         # Check if upload already has a changelog entry
2823         query = """SELECT changelog_id FROM changes WHERE source = :source
2824                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2825         if session.execute(query, {'source': self.pkg.changes['source'], \
2826                                    'version': self.pkg.changes['version'], \
2827                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2828             session.commit()
2829             return
2830
2831         # Add current changelog text into changelogs_text table, return created ID
2832         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
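        # (INSERT ... RETURNING is PostgreSQL syntax; dak's database is
        # PostgreSQL, so this fetches the new row's id in one round trip.)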
2833         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2834
2835         # Link ID to the upload available in changes table
2836         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2837                    AND version = :version AND architecture = :architecture"""
2838         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2839                                 'version': self.pkg.changes['version'], \
2840                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2841
2842         session.commit()