# daklib/queue.py -- from dak.git (git.decadent.org.uk), merge of branch 'master'
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Determine the file type of C{f}.

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    if f.has_key("dbtype"):
        # An explicit database type on the entry always wins.
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Make sure an override type exists for whatever we determined.
    if get_override_type(file_type, session) is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
90
91 ################################################################################
92
93 # Determine what parts in a .changes are NEW
94
95 def determine_new(filename, changes, files, warn=1, session = None):
96     """
97     Determine what parts in a C{changes} file are NEW.
98
99     @type filename: str
100     @param filename: changes filename
101
102     @type changes: Upload.Pkg.changes dict
103     @param changes: Changes dictionary
104
105     @type files: Upload.Pkg.files dict
106     @param files: Files dictionary
107
108     @type warn: bool
109     @param warn: Warn if overrides are added for (old)stable
110
111     @rtype: dict
112     @return: dictionary of NEW components.
113
114     """
115     # TODO: This should all use the database instead of parsing the changes
116     # file again
117     new = {}
118     byhand = {}
119
120     dbchg = get_dbchange(filename, session)
121     if dbchg is None:
122         print "Warning: cannot find changes file in database; won't check byhand"
123
124     # Build up a list of potentially new things
125     for name, f in files.items():
126         # Keep a record of byhand elements
127         if f["section"] == "byhand":
128             byhand[name] = 1
129             continue
130
131         pkg = f["package"]
132         priority = f["priority"]
133         section = f["section"]
134         file_type = get_type(f, session)
135         component = f["component"]
136
137         if file_type == "dsc":
138             priority = "source"
139
140         if not new.has_key(pkg):
141             new[pkg] = {}
142             new[pkg]["priority"] = priority
143             new[pkg]["section"] = section
144             new[pkg]["type"] = file_type
145             new[pkg]["component"] = component
146             new[pkg]["files"] = []
147         else:
148             old_type = new[pkg]["type"]
149             if old_type != file_type:
150                 # source gets trumped by deb or udeb
151                 if old_type == "dsc":
152                     new[pkg]["priority"] = priority
153                     new[pkg]["section"] = section
154                     new[pkg]["type"] = file_type
155                     new[pkg]["component"] = component
156
157         new[pkg]["files"].append(name)
158
159         if f.has_key("othercomponents"):
160             new[pkg]["othercomponents"] = f["othercomponents"]
161
162     # Fix up the list of target suites
163     cnf = Config()
164     for suite in changes["suite"].keys():
165         oldsuite = get_suite(suite, session)
166         if not oldsuite:
167             print "WARNING: Invalid suite %s found" % suite
168             continue
169
170         if oldsuite.overridesuite:
171             newsuite = get_suite(oldsuite.overridesuite, session)
172
173             if newsuite:
174                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
175                     oldsuite.overridesuite, suite)
176                 del changes["suite"][suite]
177                 changes["suite"][oldsuite.overridesuite] = 1
178             else:
179                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
180                     oldsuite.overridesuite, suite)
181
182     # Check for unprocessed byhand files
183     if dbchg is not None:
184         for b in byhand.keys():
185             # Find the file entry in the database
186             found = False
187             for f in dbchg.files:
188                 if f.filename == b:
189                     found = True
190                     # If it's processed, we can ignore it
191                     if f.processed:
192                         del byhand[b]
193                     break
194
195             if not found:
196                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
197
198     # Check for new stuff
199     for suite in changes["suite"].keys():
200         for pkg in new.keys():
201             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
202             if len(ql) > 0:
203                 for file_entry in new[pkg]["files"]:
204                     if files[file_entry].has_key("new"):
205                         del files[file_entry]["new"]
206                 del new[pkg]
207
208     if warn:
209         for s in ['stable', 'oldstable']:
210             if changes["suite"].has_key(s):
211                 print "WARNING: overrides will be added for %s!" % s
212         for pkg in new.keys():
213             if new[pkg].has_key("othercomponents"):
214                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
215
216     return new, byhand
217
218 ################################################################################
219
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    A failed check is signalled by setting the package's "section id"
    or "priority id" to -1 in C{new}.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # Bug fix: compare the priority *name*, not the DBPriority object
        # returned by get_priority() above -- the object never equals the
        # string "source", so this check was previously a no-op.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
261
262 ###############################################################################
263
# Used by Upload.check_timestamps
class TarTime(object):
    """Collect tar member names whose mtimes fall outside an allowed window."""

    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        """Forget any previously recorded offending members."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Record Name as too new and/or too old based on its MTime."""
        too_new = MTime > self.future_cutoff
        too_old = MTime < self.past_cutoff
        if too_new:
            self.future_files[Name] = MTime
        if too_old:
            self.ancient_files[Name] = MTime
280
281 ###############################################################################
282
def prod_maintainer(notes, upload):
    """
    Interactively compose and send a "prod" mail for an upload.

    The comments of the given notes seed a temporary file which the user
    edits with $EDITOR (default vi); the user is then prompted to send
    ([P]rod, the default), re-Edit, Abandon, or Quit.

    @type notes: list
    @param notes: note objects whose .comment texts seed the editor buffer

    @type upload: Upload
    @param upload: upload whose Subst map fills the process-new.prod template
    """
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # 'E' re-opens the editor; any other valid answer leaves the loop.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        # Keep prompting until a letter from the prompt is given; an empty
        # answer selects the bracketed default (P).
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the proding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
332
333 ################################################################################
334
def edit_note(note, upload, session, trainee=False):
    """
    Interactively edit and store a NEW note for an upload.

    Opens $EDITOR (default vi) on a temporary file, shows the result and
    prompts to accept ([D]one, the default), re-Edit, Abandon, or Quit.
    On acceptance a NewComment row for the upload's source/version is
    committed to the database.

    @param note: not referenced in this function's body; the note text is
                 taken from the edited temporary file instead
    @param upload: Upload whose source/version the comment is attached to
    @param session: SQLAlchemy session used to store the comment
    @param trainee: recorded on the comment as its trainee flag
    """
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # 'E' re-opens the editor; any other valid answer leaves the loop.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        # Keep prompting until a letter from the prompt is given; an empty
        # answer selects the bracketed default (D).
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
370
371 ###############################################################################
372
# Suite names DMs (Debian Maintainers) can upload to; used by
# get_newest_source() below to restrict its query.
dm_suites = ['unstable', 'experimental']
375
def get_newest_source(source, session):
    """
    Return the newest DBSource object named C{source} found in dm_suites,
    or None if the package is not present there.
    """
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()
385
def get_suite_version(source, session):
    """
    Return a list of (suite_name, version) tuples, one per suite that
    contains the given source package.
    """
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source = source)
    return q.all()
391
392 class Upload(object):
393     """
394     Everything that has to do with an upload processed.
395
396     """
    def __init__(self):
        # No logger until one is explicitly attached by the caller.
        self.logger = None
        # Per-upload package state (changes file contents, files, ...).
        self.pkg = Changes()
        self.reset()
401
402     ###########################################################################
403
404     def reset (self):
405         """ Reset a number of internal variables."""
406
407         # Initialize the substitution template map
408         cnf = Config()
409         self.Subst = {}
410         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
411         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
412         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
413         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
414
415         self.rejects = []
416         self.warnings = []
417         self.notes = []
418
419         self.later_check_files = []
420
421         self.pkg.reset()
422
423     def package_info(self):
424         """
425         Format various messages from this Upload to send to the maintainer.
426         """
427
428         msgs = (
429             ('Reject Reasons', self.rejects),
430             ('Warnings', self.warnings),
431             ('Notes', self.notes),
432         )
433
434         msg = ''
435         for title, messages in msgs:
436             if messages:
437                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
438         msg += '\n\n'
439
440         return msg
441
442     ###########################################################################
    def update_subst(self):
        """
        Set up the per-package template substitution mappings.

        Fills self.Subst with architecture, filename, maintainer and suite
        values derived from self.pkg.changes, applying sponsorship,
        tracking-server Bcc and global maintainer-override adjustments.
        """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                # Sponsored upload: also notify the sponsor if one is recorded.
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        # Bcc the package tracking system for source uploads, when configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
499
500     ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for mandantory
        fields  within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        # (e.g. "amd64 i386" becomes {"amd64": 1, "i386": 1}).
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            # On failure fall back to empty values so later code can rely
            # on these keys being present.
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
612
613     ###########################################################################
614
    def check_distributions(self):
        """
        Check and map the Distribution field.

        Applies each configured SuiteMappings rule ("map", "silent-map",
        "map-unreleased", "ignore", "reject", "propup-version") to the
        upload's target distributions, then rejects if no valid target
        distribution remains or an unknown one is listed.
        """

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # Unconditionally rename target suite 'source' to 'dest'
                # ("map" also records a note; "silent-map" does not).
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Rename only if the upload carries an architecture that is
                # not built in the source suite; one such arch suffices.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Drop the suite from the target list, with a warning.
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
669
670     ###########################################################################
671
672     def binary_file_checks(self, f, session):
673         cnf = Config()
674         entry = self.pkg.files[f]
675
676         # Extract package control information
677         deb_file = utils.open_file(f)
678         try:
679             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
680         except:
681             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
682             deb_file.close()
683             # Can't continue, none of the checks on control would work.
684             return
685
686         # Check for mandantory "Description:"
687         deb_file.seek(0)
688         try:
689             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
690         except:
691             self.rejects.append("%s: Missing Description in binary package" % (f))
692             return
693
694         deb_file.close()
695
696         # Check for mandatory fields
697         for field in [ "Package", "Architecture", "Version" ]:
698             if control.Find(field) == None:
699                 # Can't continue
700                 self.rejects.append("%s: No %s field in control." % (f, field))
701                 return
702
703         # Ensure the package name matches the one give in the .changes
704         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
705             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
706
707         # Validate the package field
708         package = control.Find("Package")
709         if not re_valid_pkg_name.match(package):
710             self.rejects.append("%s: invalid package name '%s'." % (f, package))
711
712         # Validate the version field
713         version = control.Find("Version")
714         if not re_valid_version.match(version):
715             self.rejects.append("%s: invalid version number '%s'." % (f, version))
716
717         # Ensure the architecture of the .deb is one we know about.
718         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
719         architecture = control.Find("Architecture")
720         upload_suite = self.pkg.changes["distribution"].keys()[0]
721
722         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
723             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
724             self.rejects.append("Unknown architecture '%s'." % (architecture))
725
726         # Ensure the architecture of the .deb is one of the ones
727         # listed in the .changes.
728         if not self.pkg.changes["architecture"].has_key(architecture):
729             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
730
731         # Sanity-check the Depends field
732         depends = control.Find("Depends")
733         if depends == '':
734             self.rejects.append("%s: Depends field is empty." % (f))
735
736         # Sanity-check the Provides field
737         provides = control.Find("Provides")
738         if provides:
739             provide = re_spacestrip.sub('', provides)
740             if provide == '':
741                 self.rejects.append("%s: Provides field is empty." % (f))
742             prov_list = provide.split(",")
743             for prov in prov_list:
744                 if not re_valid_pkg_name.match(prov):
745                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
746
747         # Check the section & priority match those given in the .changes (non-fatal)
748         if     control.Find("Section") and entry["section"] != "" \
749            and entry["section"] != control.Find("Section"):
750             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
751                                 (f, control.Find("Section", ""), entry["section"]))
752         if control.Find("Priority") and entry["priority"] != "" \
753            and entry["priority"] != control.Find("Priority"):
754             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
755                                 (f, control.Find("Priority", ""), entry["priority"]))
756
757         entry["package"] = package
758         entry["architecture"] = architecture
759         entry["version"] = version
760         entry["maintainer"] = control.Find("Maintainer", "")
761
762         if f.endswith(".udeb"):
763             self.pkg.files[f]["dbtype"] = "udeb"
764         elif f.endswith(".deb"):
765             self.pkg.files[f]["dbtype"] = "deb"
766         else:
767             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
768
769         entry["source"] = control.Find("Source", entry["package"])
770
771         # Get the source version
772         source = entry["source"]
773         source_version = ""
774
775         if source.find("(") != -1:
776             m = re_extract_src_version.match(source)
777             source = m.group(1)
778             source_version = m.group(2)
779
780         if not source_version:
781             source_version = self.pkg.files[f]["version"]
782
783         entry["source package"] = source
784         entry["source version"] = source_version
785
786         # Ensure the filename matches the contents of the .deb
787         m = re_isadeb.match(f)
788
789         #  package name
790         file_package = m.group(1)
791         if entry["package"] != file_package:
792             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
793                                 (f, file_package, entry["dbtype"], entry["package"]))
794         epochless_version = re_no_epoch.sub('', control.Find("Version"))
795
796         #  version
797         file_version = m.group(2)
798         if epochless_version != file_version:
799             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
800                                 (f, file_version, entry["dbtype"], epochless_version))
801
802         #  architecture
803         file_architecture = m.group(3)
804         if entry["architecture"] != file_architecture:
805             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
806                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
807
808         # Check for existent source
809         source_version = entry["source version"]
810         source_package = entry["source package"]
811         if self.pkg.changes["architecture"].has_key("source"):
812             if source_version != self.pkg.changes["version"]:
813                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
814                                     (source_version, f, self.pkg.changes["version"]))
815         else:
816             # Check in the SQL database
817             if not source_exists(source_package, source_version, suites = \
818                 self.pkg.changes["distribution"].keys(), session = session):
819                 # Check in one of the other directories
820                 source_epochless_version = re_no_epoch.sub('', source_version)
821                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
822                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
823                     entry["byhand"] = 1
824                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
825                     entry["new"] = 1
826                 else:
827                     dsc_file_exists = False
828                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
829                         if cnf.has_key("Dir::Queue::%s" % (myq)):
830                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
831                                 dsc_file_exists = True
832                                 break
833
834                     if not dsc_file_exists:
835                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
836
837         # Check the version and for file overwrites
838         self.check_binary_against_db(f, session)
839
840         # Temporarily disable contents generation until we change the table storage layout
841         #b = Binary(f)
842         #b.scan_package()
843         #if len(b.rejects) > 0:
844         #    for j in b.rejects:
845         #        self.rejects.append(j)
846
847     def source_file_checks(self, f, session):
848         entry = self.pkg.files[f]
849
850         m = re_issource.match(f)
851         if not m:
852             return
853
854         entry["package"] = m.group(1)
855         entry["version"] = m.group(2)
856         entry["type"] = m.group(3)
857
858         # Ensure the source package name matches the Source filed in the .changes
859         if self.pkg.changes["source"] != entry["package"]:
860             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
861
862         # Ensure the source version matches the version in the .changes file
863         if re_is_orig_source.match(f):
864             changes_version = self.pkg.changes["chopversion2"]
865         else:
866             changes_version = self.pkg.changes["chopversion"]
867
868         if changes_version != entry["version"]:
869             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
870
871         # Ensure the .changes lists source in the Architecture field
872         if not self.pkg.changes["architecture"].has_key("source"):
873             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
874
875         # Check the signature of a .dsc file
876         if entry["type"] == "dsc":
877             # check_signature returns either:
878             #  (None, [list, of, rejects]) or (signature, [])
879             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
880             for j in rejects:
881                 self.rejects.append(j)
882
883         entry["architecture"] = "source"
884
885     def per_suite_file_checks(self, f, suite, session):
886         cnf = Config()
887         entry = self.pkg.files[f]
888
889         # Skip byhand
890         if entry.has_key("byhand"):
891             return
892
893         # Check we have fields we need to do these checks
894         oktogo = True
895         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
896             if not entry.has_key(m):
897                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
898                 oktogo = False
899
900         if not oktogo:
901             return
902
903         # Handle component mappings
904         for m in cnf.ValueList("ComponentMappings"):
905             (source, dest) = m.split()
906             if entry["component"] == source:
907                 entry["original component"] = source
908                 entry["component"] = dest
909
910         # Ensure the component is valid for the target suite
911         if cnf.has_key("Suite:%s::Components" % (suite)) and \
912            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
913             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
914             return
915
916         # Validate the component
917         if not get_component(entry["component"], session):
918             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
919             return
920
921         # See if the package is NEW
922         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
923             entry["new"] = 1
924
925         # Validate the priority
926         if entry["priority"].find('/') != -1:
927             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
928
929         # Determine the location
930         location = cnf["Dir::Pool"]
931         l = get_location(location, entry["component"], session=session)
932         if l is None:
933             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
934             entry["location id"] = -1
935         else:
936             entry["location id"] = l.location_id
937
938         # Check the md5sum & size against existing files (if any)
939         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
940
941         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
942                                          entry["size"], entry["md5sum"], entry["location id"])
943
944         if found is None:
945             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
946         elif found is False and poolfile is not None:
947             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
948         else:
949             if poolfile is None:
950                 entry["files id"] = None
951             else:
952                 entry["files id"] = poolfile.file_id
953
954         # Check for packages that have moved from one component to another
955         entry['suite'] = suite
956         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
957         if res.rowcount > 0:
958             entry["othercomponents"] = res.fetchone()[0]
959
    def check_files(self, action=True):
        """
        Run per-file checks for every file in the upload: optionally copy
        the files into the holding area, reject .changes files dak already
        knows about, then dispatch each file to the binary / source /
        byhand checks and finally the per-suite checks.

        @type action: bool
        @param action: if True, copy files into holding (False for dry
                       runs such as -n, where no reject message has been
                       generated by copy_to_holding)
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                # A failed copy is not fatal here: the file may still be
                # located later (see later_check_files handling below).
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            # Reject filenames that fail the taint-free regex.
            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
1058
1059     ###########################################################################
    def check_dsc(self, action=True, session=None):
        """
        Run all checks on the .dsc of a source upload: locate and parse the
        .dsc, enforce mandatory fields, validate source name/version and the
        allowed source formats per distribution, sanity-check Maintainer and
        Build-Depends*, verify the version against the .changes, check the
        Files field contents, and cross-check against the database.

        Returns bool indicating whether or not the source changes are valid.

        @type action: bool
        @param action: False for dry runs (-n); affects only the CantOpenError
                       handling below
        @type session: SQLAlchemy session or None
        @param session: used for the source-format lookup only; a fresh
                        session is opened for the DB cross-checks further down
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        # NOTE(review): this rebinds the `session' parameter; the caller's
        # session is only used for get_suite_src_formats above -- confirm
        # this is intentional.
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            # NOTE(review): leftover debug output?
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s references in changes" % f)

        session.close()

        return True
1178
1179     ###########################################################################
1180
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS.

        Expects to be run with the current working directory set to a
        scratch directory (see check_source); symlinks the source files
        from C{source_dir} into the cwd before running dpkg-source."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                # NOTE(review): ftype is computed but never used below.
                ftype = m.group(3)
                # Orig files that already have a known pool path are
                # symlinked from that path in the loop further down.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        # Version tracking is optional; bail out if it isn't configured.
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog, keeping only the lines matching the
        # changelog-version regex.
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1254
1255     def check_source(self):
1256         # Bail out if:
1257         #    a) there's no source
1258         if not self.pkg.changes["architecture"].has_key("source"):
1259             return
1260
1261         tmpdir = utils.temp_dirname()
1262
1263         # Move into the temporary directory
1264         cwd = os.getcwd()
1265         os.chdir(tmpdir)
1266
1267         # Get the changelog version history
1268         self.get_changelog_versions(cwd)
1269
1270         # Move back and cleanup the temporary tree
1271         os.chdir(cwd)
1272
1273         try:
1274             shutil.rmtree(tmpdir)
1275         except OSError, e:
1276             if e.errno != errno.EACCES:
1277                 print "foobar"
1278                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1279
1280             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1281             # We probably have u-r or u-w directories so chmod everything
1282             # and try again.
1283             cmd = "chmod -R u+rwx %s" % (tmpdir)
1284             result = os.system(cmd)
1285             if result != 0:
1286                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1287             shutil.rmtree(tmpdir)
1288         except Exception, e:
1289             print "foobar2 (%s)" % e
1290             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1291
1292     ###########################################################################
1293     def ensure_hashes(self):
1294         # Make sure we recognise the format of the Files: field in the .changes
1295         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1296         if len(format) == 2:
1297             format = int(format[0]), int(format[1])
1298         else:
1299             format = int(float(format[0])), 0
1300
1301         # We need to deal with the original changes blob, as the fields we need
1302         # might not be in the changes dict serialised into the .dak anymore.
1303         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1304
1305         # Copy the checksums over to the current changes dict.  This will keep
1306         # the existing modifications to it intact.
1307         for field in orig_changes:
1308             if field.startswith('checksums-'):
1309                 self.pkg.changes[field] = orig_changes[field]
1310
1311         # Check for unsupported hashes
1312         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1313             self.rejects.append(j)
1314
1315         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1316             self.rejects.append(j)
1317
1318         # We have to calculate the hash if we have an earlier changes version than
1319         # the hash appears in rather than require it exist in the changes file
1320         for hashname, hashfunc, version in utils.known_hashes:
1321             # TODO: Move _ensure_changes_hash into this class
1322             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1323                 self.rejects.append(j)
1324             if "source" in self.pkg.changes["architecture"]:
1325                 # TODO: Move _ensure_dsc_hash into this class
1326                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1327                     self.rejects.append(j)
1328
1329     def check_hashes(self):
1330         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1331             self.rejects.append(m)
1332
1333         for m in utils.check_size(".changes", self.pkg.files):
1334             self.rejects.append(m)
1335
1336         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1337             self.rejects.append(m)
1338
1339         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1340             self.rejects.append(m)
1341
1342         self.ensure_hashes()
1343
1344     ###########################################################################
1345
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        Candidates are searched for first in the pool, then in the other
        queue directories.  A candidate is only linked if its size and
        md5sum match the values declared in the .dsc.

        A list containing the symlinks that were created is returned (so
        they can be removed).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            # Symlink `path' into target_dir iff its size and md5sum match
            # what the .dsc declares for this orig (closes over `entry' and
            # `filename' of the current iteration); returns success.
            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Open (and later close) a private session only when the caller
            # did not supply one.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1427
1428     ###########################################################################
1429
1430     def check_lintian(self):
1431         """
1432         Extends self.rejects by checking the output of lintian against tags
1433         specified in Dinstall::LintianTags.
1434         """
1435
1436         cnf = Config()
1437
1438         # Don't reject binary uploads
1439         if not self.pkg.changes['architecture'].has_key('source'):
1440             return
1441
1442         # Only check some distributions
1443         for dist in ('unstable', 'experimental'):
1444             if dist in self.pkg.changes['distribution']:
1445                 break
1446         else:
1447             return
1448
1449         # If we do not have a tagfile, don't do anything
1450         tagfile = cnf.get("Dinstall::LintianTags")
1451         if tagfile is None:
1452             return
1453
1454         # Parse the yaml file
1455         sourcefile = file(tagfile, 'r')
1456         sourcecontent = sourcefile.read()
1457         sourcefile.close()
1458
1459         try:
1460             lintiantags = yaml.load(sourcecontent)['lintian']
1461         except yaml.YAMLError, msg:
1462             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1463             return
1464
1465         # Try and find all orig mentioned in the .dsc
1466         symlinked = self.ensure_orig()
1467
1468         # Setup the input file for lintian
1469         fd, temp_filename = utils.temp_filename()
1470         temptagfile = os.fdopen(fd, 'w')
1471         for tags in lintiantags.values():
1472             temptagfile.writelines(['%s\n' % x for x in tags])
1473         temptagfile.close()
1474
1475         try:
1476             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1477                 (temp_filename, self.pkg.changes_file)
1478
1479             result, output = commands.getstatusoutput(cmd)
1480         finally:
1481             # Remove our tempfile and any symlinks we created
1482             os.unlink(temp_filename)
1483
1484             for symlink in symlinked:
1485                 os.unlink(symlink)
1486
1487         if result == 2:
1488             utils.warn("lintian failed for %s [return code: %s]." % \
1489                 (self.pkg.changes_file, result))
1490             utils.warn(utils.prefix_multi_line_string(output, \
1491                 " [possible output:] "))
1492
1493         def log(*txt):
1494             if self.logger:
1495                 self.logger.log(
1496                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1497                 )
1498
1499         # Generate messages
1500         parsed_tags = parse_lintian_output(output)
1501         self.rejects.extend(
1502             generate_reject_messages(parsed_tags, lintiantags, log=log)
1503         )
1504
1505     ###########################################################################
1506     def check_urgency(self):
1507         cnf = Config()
1508         if self.pkg.changes["architecture"].has_key("source"):
1509             if not self.pkg.changes.has_key("urgency"):
1510                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1511             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1512             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1513                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1514                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1515                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1516
1517     ###########################################################################
1518
1519     # Sanity check the time stamps of files inside debs.
1520     # [Files in the near future cause ugly warnings and extreme time
1521     #  travel can cause errors on extraction]
1522
1523     def check_timestamps(self):
1524         Cnf = Config()
1525
1526         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1527         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1528         tar = TarTime(future_cutoff, past_cutoff)
1529
1530         for filename, entry in self.pkg.files.items():
1531             if entry["type"] == "deb":
1532                 tar.reset()
1533                 try:
1534                     deb_file = utils.open_file(filename)
1535                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1536                     deb_file.seek(0)
1537                     try:
1538                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1539                     except SystemError, e:
1540                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1541                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1542                             raise
1543                         deb_file.seek(0)
1544                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1545
1546                     deb_file.close()
1547
1548                     future_files = tar.future_files.keys()
1549                     if future_files:
1550                         num_future_files = len(future_files)
1551                         future_file = future_files[0]
1552                         future_date = tar.future_files[future_file]
1553                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1554                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1555
1556                     ancient_files = tar.ancient_files.keys()
1557                     if ancient_files:
1558                         num_ancient_files = len(ancient_files)
1559                         ancient_file = ancient_files[0]
1560                         ancient_date = tar.ancient_files[ancient_file]
1561                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1562                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1563                 except:
1564                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1565
1566     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1567         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1568             sponsored = False
1569         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1570             sponsored = False
1571             if uid_name == "":
1572                 sponsored = True
1573         else:
1574             sponsored = True
1575             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1576                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1577                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1578                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1579                         self.pkg.changes["sponsoremail"] = uid_email
1580
1581         return sponsored
1582
1583
1584     ###########################################################################
1585     # check_signed_by_key checks
1586     ###########################################################################
1587
1588     def check_signed_by_key(self):
1589         """Ensure the .changes is signed by an authorized uploader."""
1590         session = DBConn().session()
1591
1592         # First of all we check that the person has proper upload permissions
1593         # and that this upload isn't blocked
1594         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1595
1596         if fpr is None:
1597             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1598             return
1599
1600         # TODO: Check that import-keyring adds UIDs properly
1601         if not fpr.uid:
1602             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1603             return
1604
1605         # Check that the fingerprint which uploaded has permission to do so
1606         self.check_upload_permissions(fpr, session)
1607
1608         # Check that this package is not in a transition
1609         self.check_transition(session)
1610
1611         session.close()
1612
1613
1614     def check_upload_permissions(self, fpr, session):
1615         # Check any one-off upload blocks
1616         self.check_upload_blocks(fpr, session)
1617
1618         # Start with DM as a special case
1619         # DM is a special case unfortunately, so we check it first
1620         # (keys with no source access get more access than DMs in one
1621         #  way; DMs can only upload for their packages whether source
1622         #  or binary, whereas keys with no access might be able to
1623         #  upload some binaries)
1624         if fpr.source_acl.access_level == 'dm':
1625             self.check_dm_upload(fpr, session)
1626         else:
1627             # Check source-based permissions for other types
1628             if self.pkg.changes["architecture"].has_key("source") and \
1629                 fpr.source_acl.access_level is None:
1630                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1631                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1632                 self.rejects.append(rej)
1633                 return
1634             # If not a DM, we allow full upload rights
1635             uid_email = "%s@debian.org" % (fpr.uid.uid)
1636             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1637
1638
1639         # Check binary upload permissions
1640         # By this point we know that DMs can't have got here unless they
1641         # are allowed to deal with the package concerned so just apply
1642         # normal checks
1643         if fpr.binary_acl.access_level == 'full':
1644             return
1645
1646         # Otherwise we're in the map case
1647         tmparches = self.pkg.changes["architecture"].copy()
1648         tmparches.pop('source', None)
1649
1650         for bam in fpr.binary_acl_map:
1651             tmparches.pop(bam.architecture.arch_string, None)
1652
1653         if len(tmparches.keys()) > 0:
1654             if fpr.binary_reject:
1655                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1656                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1657                 self.rejects.append(rej)
1658             else:
1659                 # TODO: This is where we'll implement reject vs throw away binaries later
1660                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1661                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1662                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1663                 self.rejects.append(rej)
1664
1665
1666     def check_upload_blocks(self, fpr, session):
1667         """Check whether any upload blocks apply to this source, source
1668            version, uid / fpr combination"""
1669
1670         def block_rej_template(fb):
1671             rej = 'Manual upload block in place for package %s' % fb.source
1672             if fb.version is not None:
1673                 rej += ', version %s' % fb.version
1674             return rej
1675
1676         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1677             # version is None if the block applies to all versions
1678             if fb.version is None or fb.version == self.pkg.changes['version']:
1679                 # Check both fpr and uid - either is enough to cause a reject
1680                 if fb.fpr is not None:
1681                     if fb.fpr.fingerprint == fpr.fingerprint:
1682                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1683                 if fb.uid is not None:
1684                     if fb.uid == fpr.uid:
1685                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1686
1687
1688     def check_dm_upload(self, fpr, session):
1689         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1690         ## none of the uploaded packages are NEW
1691         rej = False
1692         for f in self.pkg.files.keys():
1693             if self.pkg.files[f].has_key("byhand"):
1694                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1695                 rej = True
1696             if self.pkg.files[f].has_key("new"):
1697                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1698                 rej = True
1699
1700         if rej:
1701             return
1702
1703         r = get_newest_source(self.pkg.changes["source"], session)
1704
1705         if r is None:
1706             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1707             self.rejects.append(rej)
1708             return
1709
1710         if not r.dm_upload_allowed:
1711             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1712             self.rejects.append(rej)
1713             return
1714
1715         ## the Maintainer: field of the uploaded .changes file corresponds with
1716         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1717         ## uploads)
1718         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1719             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1720
1721         ## the most recent version of the package uploaded to unstable or
1722         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1723         ## non-developer maintainers cannot NMU or hijack packages)
1724
1725         # srcuploaders includes the maintainer
1726         accept = False
1727         for sup in r.srcuploaders:
1728             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1729             # Eww - I hope we never have two people with the same name in Debian
1730             if email == fpr.uid.uid or name == fpr.uid.name:
1731                 accept = True
1732                 break
1733
1734         if not accept:
1735             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1736             return
1737
1738         ## none of the packages are being taken over from other source packages
1739         for b in self.pkg.changes["binary"].keys():
1740             for suite in self.pkg.changes["distribution"].keys():
1741                 q = session.query(DBSource)
1742                 q = q.join(DBBinary).filter_by(package=b)
1743                 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1744
1745                 for s in q.all():
1746                     if s.source != self.pkg.changes["source"]:
1747                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1748
1749
1750
1751     def check_transition(self, session):
1752         cnf = Config()
1753
1754         sourcepkg = self.pkg.changes["source"]
1755
1756         # No sourceful upload -> no need to do anything else, direct return
1757         # We also work with unstable uploads, not experimental or those going to some
1758         # proposed-updates queue
1759         if "source" not in self.pkg.changes["architecture"] or \
1760            "unstable" not in self.pkg.changes["distribution"]:
1761             return
1762
1763         # Also only check if there is a file defined (and existant) with
1764         # checks.
1765         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1766         if transpath == "" or not os.path.exists(transpath):
1767             return
1768
1769         # Parse the yaml file
1770         sourcefile = file(transpath, 'r')
1771         sourcecontent = sourcefile.read()
1772         try:
1773             transitions = yaml.load(sourcecontent)
1774         except yaml.YAMLError, msg:
1775             # This shouldn't happen, there is a wrapper to edit the file which
1776             # checks it, but we prefer to be safe than ending up rejecting
1777             # everything.
1778             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1779             return
1780
1781         # Now look through all defined transitions
1782         for trans in transitions:
1783             t = transitions[trans]
1784             source = t["source"]
1785             expected = t["new"]
1786
1787             # Will be None if nothing is in testing.
1788             current = get_source_in_suite(source, "testing", session)
1789             if current is not None:
1790                 compare = apt_pkg.VersionCompare(current.version, expected)
1791
1792             if current is None or compare < 0:
1793                 # This is still valid, the current version in testing is older than
1794                 # the new version we wait for, or there is none in testing yet
1795
1796                 # Check if the source we look at is affected by this.
1797                 if sourcepkg in t['packages']:
1798                     # The source is affected, lets reject it.
1799
1800                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1801                         sourcepkg, trans)
1802
1803                     if current is not None:
1804                         currentlymsg = "at version %s" % (current.version)
1805                     else:
1806                         currentlymsg = "not present in testing"
1807
1808                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1809
1810                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1811 is part of a testing transition designed to get %s migrated (it is
1812 currently %s, we need version %s).  This transition is managed by the
1813 Release Team, and %s is the Release-Team member responsible for it.
1814 Please mail debian-release@lists.debian.org or contact %s directly if you
1815 need further assistance.  You might want to upload to experimental until this
1816 transition is done."""
1817                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1818
1819                     self.rejects.append(rejectmsg)
1820                     return
1821
1822     ###########################################################################
1823     # End check_signed_by_key checks
1824     ###########################################################################
1825
1826     def build_summaries(self):
1827         """ Build a summary of changes the upload introduces. """
1828
1829         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1830
1831         short_summary = summary
1832
1833         # This is for direport's benefit...
1834         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1835
1836         if byhand or new:
1837             summary += "Changes: " + f
1838
1839         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1840
1841         summary += self.announce(short_summary, 0)
1842
1843         return (summary, short_summary)
1844
1845     ###########################################################################
1846
1847     def close_bugs(self, summary, action):
1848         """
1849         Send mail to close bugs as instructed by the closes field in the changes file.
1850         Also add a line to summary if any work was done.
1851
1852         @type summary: string
1853         @param summary: summary text, as given by L{build_summaries}
1854
1855         @type action: bool
1856         @param action: Set to false no real action will be done.
1857
1858         @rtype: string
1859         @return: summary. If action was taken, extended by the list of closed bugs.
1860
1861         """
1862
1863         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1864
1865         bugs = self.pkg.changes["closes"].keys()
1866
1867         if not bugs:
1868             return summary
1869
1870         bugs.sort()
1871         summary += "Closing bugs: "
1872         for bug in bugs:
1873             summary += "%s " % (bug)
1874             if action:
1875                 self.update_subst()
1876                 self.Subst["__BUG_NUMBER__"] = bug
1877                 if self.pkg.changes["distribution"].has_key("stable"):
1878                     self.Subst["__STABLE_WARNING__"] = """
1879 Note that this package is not part of the released stable Debian
1880 distribution.  It may have dependencies on other unreleased software,
1881 or other instabilities.  Please take care if you wish to install it.
1882 The update will eventually make its way into the next released Debian
1883 distribution."""
1884                 else:
1885                     self.Subst["__STABLE_WARNING__"] = ""
1886                 mail_message = utils.TemplateSubst(self.Subst, template)
1887                 utils.send_mail(mail_message)
1888
1889                 # Clear up after ourselves
1890                 del self.Subst["__BUG_NUMBER__"]
1891                 del self.Subst["__STABLE_WARNING__"]
1892
1893         if action and self.logger:
1894             self.logger.log(["closing bugs"] + bugs)
1895
1896         summary += "\n"
1897
1898         return summary
1899
1900     ###########################################################################
1901
1902     def announce(self, short_summary, action):
1903         """
1904         Send an announce mail about a new upload.
1905
1906         @type short_summary: string
1907         @param short_summary: Short summary text to include in the mail
1908
1909         @type action: bool
1910         @param action: Set to false no real action will be done.
1911
1912         @rtype: string
1913         @return: Textstring about action taken.
1914
1915         """
1916
1917         cnf = Config()
1918         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1919
1920         # Only do announcements for source uploads with a recent dpkg-dev installed
1921         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1922            self.pkg.changes["architecture"].has_key("source"):
1923             return ""
1924
1925         lists_done = {}
1926         summary = ""
1927
1928         self.Subst["__SHORT_SUMMARY__"] = short_summary
1929
1930         for dist in self.pkg.changes["distribution"].keys():
1931             suite = get_suite(dist)
1932             if suite is None: continue
1933             announce_list = suite.announce
1934             if announce_list == "" or lists_done.has_key(announce_list):
1935                 continue
1936
1937             lists_done[announce_list] = 1
1938             summary += "Announcing to %s\n" % (announce_list)
1939
1940             if action:
1941                 self.update_subst()
1942                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1943                 if cnf.get("Dinstall::TrackingServer") and \
1944                    self.pkg.changes["architecture"].has_key("source"):
1945                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1946                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1947
1948                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1949                 utils.send_mail(mail_message)
1950
1951                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1952
1953         if cnf.FindB("Dinstall::CloseBugs"):
1954             summary = self.close_bugs(summary, action)
1955
1956         del self.Subst["__SHORT_SUMMARY__"]
1957
1958         return summary
1959
1960     ###########################################################################
    @session_wrapper
    def accept (self, summary, short_summary, session=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        cnf = Config()
        stats = SummaryStats()

        print "Installing."
        self.logger.log(["installing changes", self.pkg.changes_file])

        # Pool file objects added by this upload; used at the end to feed
        # the per-suite copy queues (e.g. buildd queues).
        poolfiles = []

        # Add the .dsc file to the DB first
        # NOTE(review): dsc_component / dsc_location_id / source stay unbound
        # when there is no "dsc" entry; the sourceful branch below assumes a
        # .dsc was present -- confirm callers guarantee this.
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
                for j in pfs:
                    poolfiles.append(j)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                poolfiles.append(add_deb_to_db(self, newfile, session))

        # If this is a sourceful diff only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        # XXX: mhy: I think this should be in add_dsc_to_db
        if self.pkg.changes["architecture"].has_key("source"):
            for orig_file in self.pkg.orig_files.keys():
                if not self.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = self.pkg.orig_files[orig_file]["id"]
                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Do the move
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

                # TODO: Care about size/md5sum collisions etc
                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                if newf is None:
                    # Copy into the pool under the new component and register
                    # the new pool file in the DB.
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                    session.flush()

                    # Don't reference the old file from this changes
                    for p in poolfiles:
                        if p.file_id == oldf.file_id:
                            poolfiles.remove(p)

                    poolfiles.append(newf)

                    # Fix up the DSC references
                    toremove = []

                    for df in source.srcfiles:
                        if df.poolfile.file_id == oldf.file_id:
                            # Add a new DSC entry and mark the old one for deletion
                            # Don't do it in the loop so we don't change the thing we're iterating over
                            newdscf = DSCFile()
                            newdscf.source_id = source.source_id
                            newdscf.poolfile_id = newf.file_id
                            session.add(newdscf)

                            toremove.append(df)

                    for df in toremove:
                        session.delete(df)

                    # Flush our changes
                    session.flush()

                    # Make sure that our source object is up-to-date
                    session.expire(source)

        # Add changelog information to the database
        self.store_changelog()

        # Install the files into the pool
        for newfile, entry in self.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            stats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suite which need it.
        copy_changes = dict([(x.copychanges, '')
                             for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
                             if x.copychanges is not None])

        for dest in copy_changes.keys():
            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # the last commit

        # Move the .changes into the 'done' directory
        utils.move(self.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])

        # Send the "accepted" mail and announce to the configured lists.
        self.update_subst()
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst,
                                           os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            if self.pkg.changes["architecture"].has_key("source"):
                # Write the version history via a temp file, then rename into
                # place so readers never see a partial file.
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                # [:-8] strips the trailing ".changes" from the filename.
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        session.commit()

        # Set up our copy queues (e.g. buildd queues)
        for suite_name in self.pkg.changes["distribution"].keys():
            suite = get_suite(suite_name, session)
            for q in suite.copy_queues:
                for f in poolfiles:
                    q.add_file_from_pool(f)

        session.commit()

        # Finally...
        stats.accept_count += 1
2136
2137     def check_override(self):
2138         """
2139         Checks override entries for validity. Mails "Override disparity" warnings,
2140         if that feature is enabled.
2141
2142         Abandons the check if
2143           - override disparity checks are disabled
2144           - mail sending is disabled
2145         """
2146
2147         cnf = Config()
2148
2149         # Abandon the check if override disparity checks have been disabled
2150         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2151             return
2152
2153         summary = self.pkg.check_override()
2154
2155         if summary == "":
2156             return
2157
2158         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2159
2160         self.update_subst()
2161         self.Subst["__SUMMARY__"] = summary
2162         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2163         utils.send_mail(mail_message)
2164         del self.Subst["__SUMMARY__"]
2165
2166     ###########################################################################
2167
2168     def remove(self, from_dir=None):
2169         """
2170         Used (for instance) in p-u to remove the package from unchecked
2171
2172         Also removes the package from holding area.
2173         """
2174         if from_dir is None:
2175             from_dir = self.pkg.directory
2176         h = Holding()
2177
2178         for f in self.pkg.files.keys():
2179             os.unlink(os.path.join(from_dir, f))
2180             if os.path.exists(os.path.join(h.holding_dir, f)):
2181                 os.unlink(os.path.join(h.holding_dir, f))
2182
2183         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2184         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2185             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2186
2187     ###########################################################################
2188
2189     def move_to_queue (self, queue):
2190         """
2191         Move files to a destination queue using the permissions in the table
2192         """
2193         h = Holding()
2194         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2195                    queue.path, perms=int(queue.change_perms, 8))
2196         for f in self.pkg.files.keys():
2197             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2198
2199     ###########################################################################
2200
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: list
        @param reject_files: names of the files to move to the reject
            directory

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            # Claim the destination atomically with O_EXCL so we never
            # clobber a reject file another process may own.
            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2248
2249     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        Moves all files of the upload (plus the .changes file) to the
        reject directory, writes a <name>.reason file there, and sends
        the rejection mail.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type notes: list
        @param notes: note objects (with author/version/notedate/comment
            attributes) used to pre-fill the editor buffer; despite the
            default of "" this is iterated when non-empty

        @return: 0 (may instead exit the process if the user picks
            Quit at the interactive prompt)

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Keep re-opening the editor until the user stops choosing Edit.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # Accept only the single letters offered by the prompt;
                # an empty answer takes the bracketed default (Reject).
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # <changes-basename>.reason file placed alongside the rejected files
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            # Automated rejection: the reason file holds just the message,
            # and the mail is flagged as automatic.
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
2347
2348     ################################################################################
2349     def in_override_p(self, package, component, suite, binary_type, filename, session):
2350         """
2351         Check if a package already has override entries in the DB
2352
2353         @type package: string
2354         @param package: package name
2355
2356         @type component: string
2357         @param component: database id of the component
2358
2359         @type suite: int
2360         @param suite: database id of the suite
2361
2362         @type binary_type: string
2363         @param binary_type: type of the package
2364
2365         @type filename: string
2366         @param filename: filename we check
2367
2368         @return: the database result. But noone cares anyway.
2369
2370         """
2371
2372         cnf = Config()
2373
2374         if binary_type == "": # must be source
2375             file_type = "dsc"
2376         else:
2377             file_type = binary_type
2378
2379         # Override suite name; used for example with proposed-updates
2380         oldsuite = get_suite(suite, session)
2381         if (not oldsuite is None) and oldsuite.overridesuite:
2382             suite = oldsuite.overridesuite
2383
2384         result = get_override(package, suite, component, file_type, session)
2385
2386         # If checking for a source package fall back on the binary override type
2387         if file_type == "dsc" and len(result) < 1:
2388             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2389
2390         # Remember the section and priority so we can check them later if appropriate
2391         if len(result) > 0:
2392             result = result[0]
2393             self.pkg.files[filename]["override section"] = result.section.section
2394             self.pkg.files[filename]["override priority"] = result.priority.priority
2395             return result
2396
2397         return None
2398
2399     ################################################################################
2400     def get_anyversion(self, sv_list, suite):
2401         """
2402         @type sv_list: list
2403         @param sv_list: list of (suite, version) tuples to check
2404
2405         @type suite: string
2406         @param suite: suite name
2407
2408         Description: TODO
2409         """
2410         Cnf = Config()
2411         anyversion = None
2412         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2413         for (s, v) in sv_list:
2414             if s in [ x.lower() for x in anysuite ]:
2415                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2416                     anyversion = v
2417
2418         return anyversion
2419
2420     ################################################################################
2421
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        Appends problems to C{self.rejects} / C{self.warnings}; may also
        mark additional suites in C{self.pkg.changes["propdistribution"]}
        when the upload should be propagated via a distribution-version
        mapping.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: filename of the upload, used in reject/warning text

        @type new_version: string
        @param new_version: version of the uploaded package

        @type sourceful: bool
        @param sourceful: whether the upload includes source; the
            "must be newer than" rule is only enforced for sourceful
            uploads
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                # vercmp < 1 means new_version <= existent_version
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                # vercmp > -1 means new_version >= existent_version
                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2498
2499     ################################################################################
2500     def check_binary_against_db(self, filename, session):
2501         # Ensure version is sane
2502         q = session.query(BinAssociation)
2503         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2504         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2505
2506         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2507                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2508
2509         # Check for any existing copies of the file
2510         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2511         q = q.filter_by(version=self.pkg.files[filename]["version"])
2512         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2513
2514         if q.count() > 0:
2515             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2516
2517     ################################################################################
2518
2519     def check_source_against_db(self, filename, session):
2520         source = self.pkg.dsc.get("source")
2521         version = self.pkg.dsc.get("version")
2522
2523         # Ensure version is sane
2524         self.cross_suite_version_check(get_suite_version(source, session),
2525                                        filename, version, sourceful=True)
2526
2527     ################################################################################
2528     def check_dsc_against_db(self, filename, session):
2529         """
2530
2531         @warning: NB: this function can remove entries from the 'files' index [if
2532          the orig tarball is a duplicate of the one in the archive]; if
2533          you're iterating over 'files' and call this function as part of
2534          the loop, be sure to add a check to the top of the loop to
2535          ensure you haven't just tried to dereference the deleted entry.
2536
2537         """
2538
2539         Cnf = Config()
2540         self.pkg.orig_files = {} # XXX: do we need to clear it?
2541         orig_files = self.pkg.orig_files
2542
2543         # Try and find all files mentioned in the .dsc.  This has
2544         # to work harder to cope with the multiple possible
2545         # locations of an .orig.tar.gz.
2546         # The ordering on the select is needed to pick the newest orig
2547         # when it exists in multiple places.
2548         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2549             found = None
2550             if self.pkg.files.has_key(dsc_name):
2551                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2552                 actual_size = int(self.pkg.files[dsc_name]["size"])
2553                 found = "%s in incoming" % (dsc_name)
2554
2555                 # Check the file does not already exist in the archive
2556                 ql = get_poolfile_like_name(dsc_name, session)
2557
2558                 # Strip out anything that isn't '%s' or '/%s$'
2559                 for i in ql:
2560                     if not i.filename.endswith(dsc_name):
2561                         ql.remove(i)
2562
2563                 # "[dak] has not broken them.  [dak] has fixed a
2564                 # brokenness.  Your crappy hack exploited a bug in
2565                 # the old dinstall.
2566                 #
2567                 # "(Come on!  I thought it was always obvious that
2568                 # one just doesn't release different files with
2569                 # the same name and version.)"
2570                 #                        -- ajk@ on d-devel@l.d.o
2571
2572                 if len(ql) > 0:
2573                     # Ignore exact matches for .orig.tar.gz
2574                     match = 0
2575                     if re_is_orig_source.match(dsc_name):
2576                         for i in ql:
2577                             if self.pkg.files.has_key(dsc_name) and \
2578                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2579                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2580                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2581                                 # TODO: Don't delete the entry, just mark it as not needed
2582                                 # This would fix the stupidity of changing something we often iterate over
2583                                 # whilst we're doing it
2584                                 del self.pkg.files[dsc_name]
2585                                 dsc_entry["files id"] = i.file_id
2586                                 if not orig_files.has_key(dsc_name):
2587                                     orig_files[dsc_name] = {}
2588                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2589                                 match = 1
2590
2591                                 # Don't bitch that we couldn't find this file later
2592                                 try:
2593                                     self.later_check_files.remove(dsc_name)
2594                                 except ValueError:
2595                                     pass
2596
2597
2598                     if not match:
2599                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2600
2601             elif re_is_orig_source.match(dsc_name):
2602                 # Check in the pool
2603                 ql = get_poolfile_like_name(dsc_name, session)
2604
2605                 # Strip out anything that isn't '%s' or '/%s$'
2606                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2607                 for i in ql:
2608                     if not i.filename.endswith(dsc_name):
2609                         ql.remove(i)
2610
2611                 if len(ql) > 0:
2612                     # Unfortunately, we may get more than one match here if,
2613                     # for example, the package was in potato but had an -sa
2614                     # upload in woody.  So we need to choose the right one.
2615
2616                     # default to something sane in case we don't match any or have only one
2617                     x = ql[0]
2618
2619                     if len(ql) > 1:
2620                         for i in ql:
2621                             old_file = os.path.join(i.location.path, i.filename)
2622                             old_file_fh = utils.open_file(old_file)
2623                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2624                             old_file_fh.close()
2625                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2626                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2627                                 x = i
2628
2629                     old_file = os.path.join(i.location.path, i.filename)
2630                     old_file_fh = utils.open_file(old_file)
2631                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2632                     old_file_fh.close()
2633                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2634                     found = old_file
2635                     suite_type = x.location.archive_type
2636                     # need this for updating dsc_files in install()
2637                     dsc_entry["files id"] = x.file_id
2638                     # See install() in process-accepted...
2639                     if not orig_files.has_key(dsc_name):
2640                         orig_files[dsc_name] = {}
2641                     orig_files[dsc_name]["id"] = x.file_id
2642                     orig_files[dsc_name]["path"] = old_file
2643                     orig_files[dsc_name]["location"] = x.location.location_id
2644                 else:
2645                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2646                     # Not there? Check the queue directories...
2647                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2648                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2649                             continue
2650                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2651                         if os.path.exists(in_otherdir):
2652                             in_otherdir_fh = utils.open_file(in_otherdir)
2653                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2654                             in_otherdir_fh.close()
2655                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2656                             found = in_otherdir
2657                             if not orig_files.has_key(dsc_name):
2658                                 orig_files[dsc_name] = {}
2659                             orig_files[dsc_name]["path"] = in_otherdir
2660
2661                     if not found:
2662                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2663                         continue
2664             else:
2665                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2666                 continue
2667             if actual_md5 != dsc_entry["md5sum"]:
2668                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2669             if actual_size != int(dsc_entry["size"]):
2670                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2671
2672     ################################################################################
2673     # This is used by process-new and process-holding to recheck a changes file
2674     # at the time we're running.  It mainly wraps various other internal functions
2675     # and is similar to accepted_checks - these should probably be tidied up
2676     # and combined
    def recheck(self, session):
        """
        Re-run the database-dependent checks for every file of the
        upload: that a binary's source is still available, plus the
        version and file-overwrite checks.  Problems are appended to
        self.rejects.
        """
        cnf = Config()
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive (check_dsc_against_db may
            # delete the entry while we iterate).
            if not self.pkg.files.has_key(f):
                continue

            entry = self.pkg.files[f]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, \
                    suites = self.pkg.changes["distribution"].keys(), session = session):
                    # Not in the database; the .dsc may still be sitting in
                    # one of the policy queue directories.
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    found = False
                    for q in ["Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                                found = True
                    if not found:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

            # Version and file overwrite checks
            if entry["type"] == "deb":
                self.check_binary_against_db(f, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(f, session)
                self.check_dsc_against_db(f, session)
2710
2711     ################################################################################
2712     def accepted_checks(self, overwrite_checks, session):
2713         # Recheck anything that relies on the database; since that's not
2714         # frozen between accept and our run time when called from p-a.
2715
2716         # overwrite_checks is set to False when installing to stable/oldstable
2717
2718         propogate={}
2719         nopropogate={}
2720
2721         # Find the .dsc (again)
2722         dsc_filename = None
2723         for f in self.pkg.files.keys():
2724             if self.pkg.files[f]["type"] == "dsc":
2725                 dsc_filename = f
2726
2727         for checkfile in self.pkg.files.keys():
2728             # The .orig.tar.gz can disappear out from under us is it's a
2729             # duplicate of one in the archive.
2730             if not self.pkg.files.has_key(checkfile):
2731                 continue
2732
2733             entry = self.pkg.files[checkfile]
2734
2735             # Check that the source still exists
2736             if entry["type"] == "deb":
2737                 source_version = entry["source version"]
2738                 source_package = entry["source package"]
2739                 if not self.pkg.changes["architecture"].has_key("source") \
2740                    and not source_exists(source_package, source_version, \
2741                     suites = self.pkg.changes["distribution"].keys(), \
2742                     session = session):
2743                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2744
2745             # Version and file overwrite checks
2746             if overwrite_checks:
2747                 if entry["type"] == "deb":
2748                     self.check_binary_against_db(checkfile, session)
2749                 elif entry["type"] == "dsc":
2750                     self.check_source_against_db(checkfile, session)
2751                     self.check_dsc_against_db(dsc_filename, session)
2752
2753             # propogate in the case it is in the override tables:
2754             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2755                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2756                     propogate[suite] = 1
2757                 else:
2758                     nopropogate[suite] = 1
2759
2760         for suite in propogate.keys():
2761             if suite in nopropogate:
2762                 continue
2763             self.pkg.changes["distribution"][suite] = 1
2764
2765         for checkfile in self.pkg.files.keys():
2766             # Check the package is still in the override tables
2767             for suite in self.pkg.changes["distribution"].keys():
2768                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2769                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2770
2771     ################################################################################
2772     # If any file of an upload has a recent mtime then chances are good
2773     # the file is still being uploaded.
2774
2775     def upload_too_new(self):
2776         cnf = Config()
2777         too_new = False
2778         # Move back to the original directory to get accurate time stamps
2779         cwd = os.getcwd()
2780         os.chdir(self.pkg.directory)
2781         file_list = self.pkg.files.keys()
2782         file_list.extend(self.pkg.dsc_files.keys())
2783         file_list.append(self.pkg.changes_file)
2784         for f in file_list:
2785             try:
2786                 last_modified = time.time()-os.path.getmtime(f)
2787                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2788                     too_new = True
2789                     break
2790             except:
2791                 pass
2792
2793         os.chdir(cwd)
2794         return too_new
2795
2796     def store_changelog(self):
2797
2798         # Skip binary-only upload if it is not a bin-NMU
2799         if not self.pkg.changes['architecture'].has_key('source'):
2800             from daklib.regexes import re_bin_only_nmu
2801             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2802                 return
2803
2804         session = DBConn().session()
2805
2806         # Check if upload already has a changelog entry
2807         query = """SELECT changelog_id FROM changes WHERE source = :source
2808                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2809         if session.execute(query, {'source': self.pkg.changes['source'], \
2810                                    'version': self.pkg.changes['version'], \
2811                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2812             session.commit()
2813             return
2814
2815         # Add current changelog text into changelogs_text table, return created ID
2816         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2817         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2818
2819         # Link ID to the upload available in changes table
2820         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2821                    AND version = :version AND architecture = :architecture"""
2822         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2823                                 'version': self.pkg.changes['version'], \
2824                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2825
2826         session.commit()