]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
9130c3a38ab1758382c0bbc851039cf48773ab77
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Determine the file type of C{f}.

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # An explicit "dbtype" entry wins; otherwise a source-style
    # extension means a dsc.  Anything else is a fatal error.
    if "dbtype" in f:
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # The resolved type must have a matching override type in the database.
    if get_override_type(file_type, session) is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
90
91 ################################################################################
92
93 # Determine what parts in a .changes are NEW
94
95 def determine_new(filename, changes, files, warn=1, session = None):
96     """
97     Determine what parts in a C{changes} file are NEW.
98
99     @type filename: str
100     @param filename: changes filename
101
102     @type changes: Upload.Pkg.changes dict
103     @param changes: Changes dictionary
104
105     @type files: Upload.Pkg.files dict
106     @param files: Files dictionary
107
108     @type warn: bool
109     @param warn: Warn if overrides are added for (old)stable
110
111     @rtype: dict
112     @return: dictionary of NEW components.
113
114     """
115     # TODO: This should all use the database instead of parsing the changes
116     # file again
117     new = {}
118     byhand = {}
119
120     dbchg = get_dbchange(filename, session)
121     if dbchg is None:
122         print "Warning: cannot find changes file in database; won't check byhand"
123
124     # Build up a list of potentially new things
125     for name, f in files.items():
126         # Keep a record of byhand elements
127         if f["section"] == "byhand":
128             byhand[name] = 1
129             continue
130
131         pkg = f["package"]
132         priority = f["priority"]
133         section = f["section"]
134         file_type = get_type(f, session)
135         component = f["component"]
136
137         if file_type == "dsc":
138             priority = "source"
139
140         if not new.has_key(pkg):
141             new[pkg] = {}
142             new[pkg]["priority"] = priority
143             new[pkg]["section"] = section
144             new[pkg]["type"] = file_type
145             new[pkg]["component"] = component
146             new[pkg]["files"] = []
147         else:
148             old_type = new[pkg]["type"]
149             if old_type != file_type:
150                 # source gets trumped by deb or udeb
151                 if old_type == "dsc":
152                     new[pkg]["priority"] = priority
153                     new[pkg]["section"] = section
154                     new[pkg]["type"] = file_type
155                     new[pkg]["component"] = component
156
157         new[pkg]["files"].append(name)
158
159         if f.has_key("othercomponents"):
160             new[pkg]["othercomponents"] = f["othercomponents"]
161
162     # Fix up the list of target suites
163     cnf = Config()
164     for suite in changes["suite"].keys():
165         oldsuite = get_suite(suite, session)
166         if not oldsuite:
167             print "WARNING: Invalid suite %s found" % suite
168             continue
169
170         if oldsuite.overridesuite:
171             newsuite = get_suite(oldsuite.overridesuite, session)
172
173             if newsuite:
174                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
175                     oldsuite.overridesuite, suite)
176                 del changes["suite"][suite]
177                 changes["suite"][oldsuite.overridesuite] = 1
178             else:
179                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
180                     oldsuite.overridesuite, suite)
181
182     # Check for unprocessed byhand files
183     if dbchg is not None:
184         for b in byhand.keys():
185             # Find the file entry in the database
186             found = False
187             for f in dbchg.files:
188                 if f.filename == b:
189                     found = True
190                     # If it's processed, we can ignore it
191                     if f.processed:
192                         del byhand[b]
193                     break
194
195             if not found:
196                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
197
198     # Check for new stuff
199     for suite in changes["suite"].keys():
200         for pkg in new.keys():
201             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
202             if len(ql) > 0:
203                 for file_entry in new[pkg]["files"]:
204                     if files[file_entry].has_key("new"):
205                         del files[file_entry]["new"]
206                 del new[pkg]
207
208     if warn:
209         for s in ['stable', 'oldstable']:
210             if changes["suite"].has_key(s):
211                 print "WARNING: overrides will be added for %s!" % s
212         for pkg in new.keys():
213             if new[pkg].has_key("othercomponents"):
214                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
215
216     return new, byhand
217
218 ################################################################################
219
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    Invalid combinations are flagged by setting "section id" /
    "priority id" to -1 in the corresponding entry of C{new}.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Unknown section -> -1 sentinel.
        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Unknown priority -> -1 sentinel.
        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # BUGFIX: compare the priority *name*, not the Priority ORM object;
        # the object never equals the string "source", which made the first
        # clause always False and the second clause invalidate the priority
        # of every dsc upload.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
261
262 ###############################################################################
263
# Used by Upload.check_timestamps
class TarTime(object):
    """
    Records tar archive members whose modification times fall outside
    the window [past_cutoff, future_cutoff].  Intended as a visitor for
    apt_inst's tar walk: offenders end up in future_files / ancient_files,
    mapping member name to its mtime.
    """

    def __init__(self, future_cutoff, past_cutoff):
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        self.reset()

    def reset(self):
        """Forget any offenders recorded so far."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Note Name if its MTime lies beyond either cutoff."""
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
280
281 ###############################################################################
282
def prod_maintainer(notes, upload):
    """
    Interactively compose and send a "prod" mail about an upload.

    The comments of the existing notes are written to a temporary file,
    the operator edits it in $EDITOR, and is then asked to [P]rod, Edit,
    Abandon or Quit.  On Prod the message is rendered through the
    process-new.prod template and mailed.

    @param notes: iterable of note objects with a .comment attribute
    @param upload: Upload object; supplies the Subst template map
    """
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # 'E' (Edit) loops back into the editor.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        # Keep prompting until the answer matches a letter in the prompt;
        # an empty reply picks the bracketed default extracted by
        # re_default_answer.
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the proding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
332
333 ################################################################################
334
335 def edit_note(note, upload, session, trainee=False):
336     # Write the current data to a temporary file
337     (fd, temp_filename) = utils.temp_filename()
338     editor = os.environ.get("EDITOR","vi")
339     answer = 'E'
340     while answer == 'E':
341         os.system("%s %s" % (editor, temp_filename))
342         temp_file = utils.open_file(temp_filename)
343         newnote = temp_file.read().rstrip()
344         temp_file.close()
345         print "New Note:"
346         print utils.prefix_multi_line_string(newnote,"  ")
347         prompt = "[D]one, Edit, Abandon, Quit ?"
348         answer = "XXX"
349         while prompt.find(answer) == -1:
350             answer = utils.our_raw_input(prompt)
351             m = re_default_answer.search(prompt)
352             if answer == "":
353                 answer = m.group(1)
354             answer = answer[:1].upper()
355     os.unlink(temp_filename)
356     if answer == 'A':
357         return
358     elif answer == 'Q':
359         end()
360         sys.exit(0)
361
362     comment = NewComment()
363     comment.package = upload.pkg.changes["source"]
364     comment.version = upload.pkg.changes["version"]
365     comment.comment = newnote
366     comment.author  = utils.whoami()
367     comment.trainee = trainee
368     session.add(comment)
369     session.commit()
370
371 ###############################################################################
372
# Suite names that Debian Maintainers (DMs) are allowed to upload to.
dm_suites = ['unstable', 'experimental']
375
def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    query = session.query(DBSource)
    query = query.filter_by(source = source)
    query = query.filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites)))
    query = query.order_by(desc('source.version'))
    return query.first()
385
386 class Upload(object):
387     """
388     Everything that has to do with an upload processed.
389
390     """
    def __init__(self):
        """Create an empty Upload and initialise all per-upload state."""
        # Optional logger; callers may attach one after construction.
        self.logger = None
        # Changes object holding all parsed per-upload package data.
        self.pkg = Changes()
        self.reset()
395
396     ###########################################################################
397
398     def reset (self):
399         """ Reset a number of internal variables."""
400
401         # Initialize the substitution template map
402         cnf = Config()
403         self.Subst = {}
404         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
405         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
406         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
407         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
408
409         self.rejects = []
410         self.warnings = []
411         self.notes = []
412
413         self.later_check_files = []
414
415         self.pkg.reset()
416
417     def package_info(self):
418         """
419         Format various messages from this Upload to send to the maintainer.
420         """
421
422         msgs = (
423             ('Reject Reasons', self.rejects),
424             ('Warnings', self.warnings),
425             ('Notes', self.notes),
426         )
427
428         msg = ''
429         for title, messages in msgs:
430             if messages:
431                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
432         msg += '\n\n'
433
434         return msg
435
436     ###########################################################################
    def update_subst(self):
        """
        Set up the per-package template substitution mappings.

        Fills self.Subst with the architecture, filename, maintainer and
        suite values derived from the current self.pkg.changes, applying
        sponsorship, tracking-server Bcc and global maintainer overrides.
        """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        # NOTE(review): fpr.uid is dereferenced unconditionally below —
        # presumably a signed upload always has a uid attached; confirm.
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        # Bcc the package-tracking server, if one is configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
493
494     ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        # (each whitespace-separated token becomes a key mapped to 1).
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            # On failure, blank all changedby* fields so later code can
            # rely on them existing.
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
606
607     ###########################################################################
608
    def check_distributions(self):
        """
        Check and map the Distribution field.

        Applies the configured SuiteMappings (map, silent-map,
        map-unreleased, ignore, reject, propup-version) to
        self.pkg.changes["distribution"], then verifies that at least one
        valid, known target distribution remains.
        """

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # Unconditionally rename suite `source' to `dest';
                # silent-map does so without leaving a note.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # Rename only when the upload contains an architecture the
                # source suite doesn't carry.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # Silently drop the suite from the target list (warn only).
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
663
664     ###########################################################################
665
666     def binary_file_checks(self, f, session):
667         cnf = Config()
668         entry = self.pkg.files[f]
669
670         # Extract package control information
671         deb_file = utils.open_file(f)
672         try:
673             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
674         except:
675             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
676             deb_file.close()
677             # Can't continue, none of the checks on control would work.
678             return
679
680         # Check for mandantory "Description:"
681         deb_file.seek(0)
682         try:
683             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
684         except:
685             self.rejects.append("%s: Missing Description in binary package" % (f))
686             return
687
688         deb_file.close()
689
690         # Check for mandatory fields
691         for field in [ "Package", "Architecture", "Version" ]:
692             if control.Find(field) == None:
693                 # Can't continue
694                 self.rejects.append("%s: No %s field in control." % (f, field))
695                 return
696
697         # Ensure the package name matches the one give in the .changes
698         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
699             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
700
701         # Validate the package field
702         package = control.Find("Package")
703         if not re_valid_pkg_name.match(package):
704             self.rejects.append("%s: invalid package name '%s'." % (f, package))
705
706         # Validate the version field
707         version = control.Find("Version")
708         if not re_valid_version.match(version):
709             self.rejects.append("%s: invalid version number '%s'." % (f, version))
710
711         # Ensure the architecture of the .deb is one we know about.
712         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
713         architecture = control.Find("Architecture")
714         upload_suite = self.pkg.changes["distribution"].keys()[0]
715
716         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
717             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
718             self.rejects.append("Unknown architecture '%s'." % (architecture))
719
720         # Ensure the architecture of the .deb is one of the ones
721         # listed in the .changes.
722         if not self.pkg.changes["architecture"].has_key(architecture):
723             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
724
725         # Sanity-check the Depends field
726         depends = control.Find("Depends")
727         if depends == '':
728             self.rejects.append("%s: Depends field is empty." % (f))
729
730         # Sanity-check the Provides field
731         provides = control.Find("Provides")
732         if provides:
733             provide = re_spacestrip.sub('', provides)
734             if provide == '':
735                 self.rejects.append("%s: Provides field is empty." % (f))
736             prov_list = provide.split(",")
737             for prov in prov_list:
738                 if not re_valid_pkg_name.match(prov):
739                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
740
741         # Check the section & priority match those given in the .changes (non-fatal)
742         if     control.Find("Section") and entry["section"] != "" \
743            and entry["section"] != control.Find("Section"):
744             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
745                                 (f, control.Find("Section", ""), entry["section"]))
746         if control.Find("Priority") and entry["priority"] != "" \
747            and entry["priority"] != control.Find("Priority"):
748             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
749                                 (f, control.Find("Priority", ""), entry["priority"]))
750
751         entry["package"] = package
752         entry["architecture"] = architecture
753         entry["version"] = version
754         entry["maintainer"] = control.Find("Maintainer", "")
755
756         if f.endswith(".udeb"):
757             self.pkg.files[f]["dbtype"] = "udeb"
758         elif f.endswith(".deb"):
759             self.pkg.files[f]["dbtype"] = "deb"
760         else:
761             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
762
763         entry["source"] = control.Find("Source", entry["package"])
764
765         # Get the source version
766         source = entry["source"]
767         source_version = ""
768
769         if source.find("(") != -1:
770             m = re_extract_src_version.match(source)
771             source = m.group(1)
772             source_version = m.group(2)
773
774         if not source_version:
775             source_version = self.pkg.files[f]["version"]
776
777         entry["source package"] = source
778         entry["source version"] = source_version
779
780         # Ensure the filename matches the contents of the .deb
781         m = re_isadeb.match(f)
782
783         #  package name
784         file_package = m.group(1)
785         if entry["package"] != file_package:
786             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
787                                 (f, file_package, entry["dbtype"], entry["package"]))
788         epochless_version = re_no_epoch.sub('', control.Find("Version"))
789
790         #  version
791         file_version = m.group(2)
792         if epochless_version != file_version:
793             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
794                                 (f, file_version, entry["dbtype"], epochless_version))
795
796         #  architecture
797         file_architecture = m.group(3)
798         if entry["architecture"] != file_architecture:
799             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
800                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
801
802         # Check for existent source
803         source_version = entry["source version"]
804         source_package = entry["source package"]
805         if self.pkg.changes["architecture"].has_key("source"):
806             if source_version != self.pkg.changes["version"]:
807                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
808                                     (source_version, f, self.pkg.changes["version"]))
809         else:
810             # Check in the SQL database
811             if not source_exists(source_package, source_version, suites = \
812                 self.pkg.changes["distribution"].keys(), session = session):
813                 # Check in one of the other directories
814                 source_epochless_version = re_no_epoch.sub('', source_version)
815                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
816                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
817                     entry["byhand"] = 1
818                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
819                     entry["new"] = 1
820                 else:
821                     dsc_file_exists = False
822                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
823                         if cnf.has_key("Dir::Queue::%s" % (myq)):
824                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
825                                 dsc_file_exists = True
826                                 break
827
828                     if not dsc_file_exists:
829                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
830
831         # Check the version and for file overwrites
832         self.check_binary_against_db(f, session)
833
834         # Temporarily disable contents generation until we change the table storage layout
835         #b = Binary(f)
836         #b.scan_package()
837         #if len(b.rejects) > 0:
838         #    for j in b.rejects:
839         #        self.rejects.append(j)
840
841     def source_file_checks(self, f, session):
842         entry = self.pkg.files[f]
843
844         m = re_issource.match(f)
845         if not m:
846             return
847
848         entry["package"] = m.group(1)
849         entry["version"] = m.group(2)
850         entry["type"] = m.group(3)
851
852         # Ensure the source package name matches the Source filed in the .changes
853         if self.pkg.changes["source"] != entry["package"]:
854             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
855
856         # Ensure the source version matches the version in the .changes file
857         if re_is_orig_source.match(f):
858             changes_version = self.pkg.changes["chopversion2"]
859         else:
860             changes_version = self.pkg.changes["chopversion"]
861
862         if changes_version != entry["version"]:
863             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
864
865         # Ensure the .changes lists source in the Architecture field
866         if not self.pkg.changes["architecture"].has_key("source"):
867             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
868
869         # Check the signature of a .dsc file
870         if entry["type"] == "dsc":
871             # check_signature returns either:
872             #  (None, [list, of, rejects]) or (signature, [])
873             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
874             for j in rejects:
875                 self.rejects.append(j)
876
877         entry["architecture"] = "source"
878
879     def per_suite_file_checks(self, f, suite, session):
880         cnf = Config()
881         entry = self.pkg.files[f]
882
883         # Skip byhand
884         if entry.has_key("byhand"):
885             return
886
887         # Check we have fields we need to do these checks
888         oktogo = True
889         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
890             if not entry.has_key(m):
891                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
892                 oktogo = False
893
894         if not oktogo:
895             return
896
897         # Handle component mappings
898         for m in cnf.ValueList("ComponentMappings"):
899             (source, dest) = m.split()
900             if entry["component"] == source:
901                 entry["original component"] = source
902                 entry["component"] = dest
903
904         # Ensure the component is valid for the target suite
905         if cnf.has_key("Suite:%s::Components" % (suite)) and \
906            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
907             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
908             return
909
910         # Validate the component
911         if not get_component(entry["component"], session):
912             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
913             return
914
915         # See if the package is NEW
916         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
917             entry["new"] = 1
918
919         # Validate the priority
920         if entry["priority"].find('/') != -1:
921             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
922
923         # Determine the location
924         location = cnf["Dir::Pool"]
925         l = get_location(location, entry["component"], session=session)
926         if l is None:
927             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
928             entry["location id"] = -1
929         else:
930             entry["location id"] = l.location_id
931
932         # Check the md5sum & size against existing files (if any)
933         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
934
935         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
936                                          entry["size"], entry["md5sum"], entry["location id"])
937
938         if found is None:
939             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
940         elif found is False and poolfile is not None:
941             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
942         else:
943             if poolfile is None:
944                 entry["files id"] = None
945             else:
946                 entry["files id"] = poolfile.file_id
947
948         # Check for packages that have moved from one component to another
949         entry['suite'] = suite
950         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
951         if res.rowcount > 0:
952             entry["othercomponents"] = res.fetchone()[0]
953
    def check_files(self, action=True):
        """
        Sanity-check every file listed in the .changes: copy them into the
        holding area (when action is True), make sure the upload is not
        already known to dak, classify each file as deb/source/byhand and
        run the type-specific and per-suite checks.

        @type action: boolean
        @param action: if True, copy files into the holding area; if False
                       (dry run, -n) only perform the checks.

        Appends problems to self.rejects / self.warnings.
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            # Copy relative to the upload directory, then restore the cwd.
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            # re_taint_free presumably whitelists safe filename characters;
            # anything outside it is rejected as tainted.
            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
1052
1053     ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid.

        Locates the single .dsc in the upload, parses it, enforces mandatory
        fields, validates source/version/format/maintainer/build-depends,
        cross-checks it against the .changes and the database, and resolves
        any files deferred by check_files() into self.later_check_files.

        Appends problems to self.rejects; a False return means processing
        cannot continue (no/multiple .dsc, or unparseable .dsc).
        """
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        # (uses the caller-supplied session, unlike the DB checks below)
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        # NOTE(review): a fresh session is opened here even when one was
        # passed in as the `session' argument -- confirm this is deliberate.
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            # NOTE(review): looks like leftover debug output -- confirm.
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s references in changes" % f)

        session.close()

        return True
1172
1173     ###########################################################################
1174
1175     def get_changelog_versions(self, source_dir):
1176         """Extracts a the source package and (optionally) grabs the
1177         version history out of debian/changelog for the BTS."""
1178
1179         cnf = Config()
1180
1181         # Find the .dsc (again)
1182         dsc_filename = None
1183         for f in self.pkg.files.keys():
1184             if self.pkg.files[f]["type"] == "dsc":
1185                 dsc_filename = f
1186
1187         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1188         if not dsc_filename:
1189             return
1190
1191         # Create a symlink mirror of the source files in our temporary directory
1192         for f in self.pkg.files.keys():
1193             m = re_issource.match(f)
1194             if m:
1195                 src = os.path.join(source_dir, f)
1196                 # If a file is missing for whatever reason, give up.
1197                 if not os.path.exists(src):
1198                     return
1199                 ftype = m.group(3)
1200                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1201                    self.pkg.orig_files[f].has_key("path"):
1202                     continue
1203                 dest = os.path.join(os.getcwd(), f)
1204                 os.symlink(src, dest)
1205
1206         # If the orig files are not a part of the upload, create symlinks to the
1207         # existing copies.
1208         for orig_file in self.pkg.orig_files.keys():
1209             if not self.pkg.orig_files[orig_file].has_key("path"):
1210                 continue
1211             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1212             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1213
1214         # Extract the source
1215         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1216         (result, output) = commands.getstatusoutput(cmd)
1217         if (result != 0):
1218             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1219             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1220             return
1221
1222         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1223             return
1224
1225         # Get the upstream version
1226         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1227         if re_strip_revision.search(upstr_version):
1228             upstr_version = re_strip_revision.sub('', upstr_version)
1229
1230         # Ensure the changelog file exists
1231         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1232         if not os.path.exists(changelog_filename):
1233             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1234             return
1235
1236         # Parse the changelog
1237         self.pkg.dsc["bts changelog"] = ""
1238         changelog_file = utils.open_file(changelog_filename)
1239         for line in changelog_file.readlines():
1240             m = re_changelog_versions.match(line)
1241             if m:
1242                 self.pkg.dsc["bts changelog"] += line
1243         changelog_file.close()
1244
1245         # Check we found at least one revision in the changelog
1246         if not self.pkg.dsc["bts changelog"]:
1247             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1248
1249     def check_source(self):
1250         # Bail out if:
1251         #    a) there's no source
1252         if not self.pkg.changes["architecture"].has_key("source"):
1253             return
1254
1255         tmpdir = utils.temp_dirname()
1256
1257         # Move into the temporary directory
1258         cwd = os.getcwd()
1259         os.chdir(tmpdir)
1260
1261         # Get the changelog version history
1262         self.get_changelog_versions(cwd)
1263
1264         # Move back and cleanup the temporary tree
1265         os.chdir(cwd)
1266
1267         try:
1268             shutil.rmtree(tmpdir)
1269         except OSError, e:
1270             if e.errno != errno.EACCES:
1271                 print "foobar"
1272                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1273
1274             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1275             # We probably have u-r or u-w directories so chmod everything
1276             # and try again.
1277             cmd = "chmod -R u+rwx %s" % (tmpdir)
1278             result = os.system(cmd)
1279             if result != 0:
1280                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1281             shutil.rmtree(tmpdir)
1282         except Exception, e:
1283             print "foobar2 (%s)" % e
1284             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1285
1286     ###########################################################################
1287     def ensure_hashes(self):
1288         # Make sure we recognise the format of the Files: field in the .changes
1289         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1290         if len(format) == 2:
1291             format = int(format[0]), int(format[1])
1292         else:
1293             format = int(float(format[0])), 0
1294
1295         # We need to deal with the original changes blob, as the fields we need
1296         # might not be in the changes dict serialised into the .dak anymore.
1297         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1298
1299         # Copy the checksums over to the current changes dict.  This will keep
1300         # the existing modifications to it intact.
1301         for field in orig_changes:
1302             if field.startswith('checksums-'):
1303                 self.pkg.changes[field] = orig_changes[field]
1304
1305         # Check for unsupported hashes
1306         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1307             self.rejects.append(j)
1308
1309         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1310             self.rejects.append(j)
1311
1312         # We have to calculate the hash if we have an earlier changes version than
1313         # the hash appears in rather than require it exist in the changes file
1314         for hashname, hashfunc, version in utils.known_hashes:
1315             # TODO: Move _ensure_changes_hash into this class
1316             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1317                 self.rejects.append(j)
1318             if "source" in self.pkg.changes["architecture"]:
1319                 # TODO: Move _ensure_dsc_hash into this class
1320                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1321                     self.rejects.append(j)
1322
1323     def check_hashes(self):
1324         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1325             self.rejects.append(m)
1326
1327         for m in utils.check_size(".changes", self.pkg.files):
1328             self.rejects.append(m)
1329
1330         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1331             self.rejects.append(m)
1332
1333         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1334             self.rejects.append(m)
1335
1336         self.ensure_hashes()
1337
1338     ###########################################################################
1339
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        Candidates are looked up first in the pool, then in the other queue
        directories; a candidate is only linked if its size and md5sum match
        the .dsc entry.

        A list containing the symlinks that were created is returned (so they
        can be removed).

        @type session: SQLAlchemy session or None
        @param session: if None, a session is opened and closed per file;
                        otherwise the given session is used and left open.
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                # Only link `path' if its size and md5sum match what the
                # .dsc declared for this orig file.
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Open our own session only if the caller did not supply one.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            # Close the session only if we opened it ourselves above.
            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1421
1422     ###########################################################################
1423
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.

        Only applies to sourceful uploads targeting unstable or experimental;
        binary-only uploads and other distributions return without doing
        anything.  Any .orig tarballs referenced by the .dsc are temporarily
        symlinked into place (via ensure_orig) so lintian can see them, and
        those symlinks are removed afterwards.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        # (for-else: the else runs only if no distribution matched)
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

        # Parse the yaml file
        # (trusted local config file, so plain yaml.load is acceptable here)
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            # fubar exits the process; the return is for clarity only
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        # (one tag per line, across all severity groups in the yaml file)
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        # NOTE(review): commands.getstatusoutput returns the raw wait status
        # (exit code << 8), so an exit code of 2 from lintian would surface
        # here as 512, not 2 — confirm which encoding is intended.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        # Helper passed to generate_reject_messages so rejects get logged
        # against this changes file.
        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
1498
1499     ###########################################################################
1500     def check_urgency(self):
1501         cnf = Config()
1502         if self.pkg.changes["architecture"].has_key("source"):
1503             if not self.pkg.changes.has_key("urgency"):
1504                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1505             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1506             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1507                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1508                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1509                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1510
1511     ###########################################################################
1512
1513     # Sanity check the time stamps of files inside debs.
1514     # [Files in the near future cause ugly warnings and extreme time
1515     #  travel can cause errors on extraction]
1516
1517     def check_timestamps(self):
1518         Cnf = Config()
1519
1520         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1521         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1522         tar = TarTime(future_cutoff, past_cutoff)
1523
1524         for filename, entry in self.pkg.files.items():
1525             if entry["type"] == "deb":
1526                 tar.reset()
1527                 try:
1528                     deb_file = utils.open_file(filename)
1529                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1530                     deb_file.seek(0)
1531                     try:
1532                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1533                     except SystemError, e:
1534                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1535                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1536                             raise
1537                         deb_file.seek(0)
1538                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1539
1540                     deb_file.close()
1541
1542                     future_files = tar.future_files.keys()
1543                     if future_files:
1544                         num_future_files = len(future_files)
1545                         future_file = future_files[0]
1546                         future_date = tar.future_files[future_file]
1547                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1548                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1549
1550                     ancient_files = tar.ancient_files.keys()
1551                     if ancient_files:
1552                         num_ancient_files = len(ancient_files)
1553                         ancient_file = ancient_files[0]
1554                         ancient_date = tar.ancient_files[ancient_file]
1555                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1556                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1557                 except:
1558                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1559
1560     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1561         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1562             sponsored = False
1563         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1564             sponsored = False
1565             if uid_name == "":
1566                 sponsored = True
1567         else:
1568             sponsored = True
1569             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1570                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1571                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1572                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1573                         self.pkg.changes["sponsoremail"] = uid_email
1574
1575         return sponsored
1576
1577
1578     ###########################################################################
1579     # check_signed_by_key checks
1580     ###########################################################################
1581
1582     def check_signed_by_key(self):
1583         """Ensure the .changes is signed by an authorized uploader."""
1584         session = DBConn().session()
1585
1586         # First of all we check that the person has proper upload permissions
1587         # and that this upload isn't blocked
1588         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1589
1590         if fpr is None:
1591             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1592             return
1593
1594         # TODO: Check that import-keyring adds UIDs properly
1595         if not fpr.uid:
1596             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1597             return
1598
1599         # Check that the fingerprint which uploaded has permission to do so
1600         self.check_upload_permissions(fpr, session)
1601
1602         # Check that this package is not in a transition
1603         self.check_transition(session)
1604
1605         session.close()
1606
1607
1608     def check_upload_permissions(self, fpr, session):
1609         # Check any one-off upload blocks
1610         self.check_upload_blocks(fpr, session)
1611
1612         # Start with DM as a special case
1613         # DM is a special case unfortunately, so we check it first
1614         # (keys with no source access get more access than DMs in one
1615         #  way; DMs can only upload for their packages whether source
1616         #  or binary, whereas keys with no access might be able to
1617         #  upload some binaries)
1618         if fpr.source_acl.access_level == 'dm':
1619             self.check_dm_upload(fpr, session)
1620         else:
1621             # Check source-based permissions for other types
1622             if self.pkg.changes["architecture"].has_key("source") and \
1623                 fpr.source_acl.access_level is None:
1624                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1625                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1626                 self.rejects.append(rej)
1627                 return
1628             # If not a DM, we allow full upload rights
1629             uid_email = "%s@debian.org" % (fpr.uid.uid)
1630             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1631
1632
1633         # Check binary upload permissions
1634         # By this point we know that DMs can't have got here unless they
1635         # are allowed to deal with the package concerned so just apply
1636         # normal checks
1637         if fpr.binary_acl.access_level == 'full':
1638             return
1639
1640         # Otherwise we're in the map case
1641         tmparches = self.pkg.changes["architecture"].copy()
1642         tmparches.pop('source', None)
1643
1644         for bam in fpr.binary_acl_map:
1645             tmparches.pop(bam.architecture.arch_string, None)
1646
1647         if len(tmparches.keys()) > 0:
1648             if fpr.binary_reject:
1649                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1650                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1651                 self.rejects.append(rej)
1652             else:
1653                 # TODO: This is where we'll implement reject vs throw away binaries later
1654                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1655                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1656                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1657                 self.rejects.append(rej)
1658
1659
1660     def check_upload_blocks(self, fpr, session):
1661         """Check whether any upload blocks apply to this source, source
1662            version, uid / fpr combination"""
1663
1664         def block_rej_template(fb):
1665             rej = 'Manual upload block in place for package %s' % fb.source
1666             if fb.version is not None:
1667                 rej += ', version %s' % fb.version
1668             return rej
1669
1670         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1671             # version is None if the block applies to all versions
1672             if fb.version is None or fb.version == self.pkg.changes['version']:
1673                 # Check both fpr and uid - either is enough to cause a reject
1674                 if fb.fpr is not None:
1675                     if fb.fpr.fingerprint == fpr.fingerprint:
1676                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1677                 if fb.uid is not None:
1678                     if fb.uid == fpr.uid:
1679                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1680
1681
    def check_dm_upload(self, fpr, session):
        """
        Apply the Debian Maintainer upload restrictions from the 2007 GR,
        appending to self.rejects on any violation: no NEW/BYHAND files,
        the source package must exist with DM-Upload-Allowed set, the
        uploader must be the maintainer or a listed uploader, and no
        binaries may be taken over from other source packages.
        """
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        # No point in the remaining (DB-based) checks if files are already bad.
        if rej:
            return

        r = get_newest_source(self.pkg.changes["source"], session)

        if r is None:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            # NOTE(review): name-based matching is ambiguous by design; see
            # the comment above — email match is the reliable path.
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        # For each uploaded binary, every source currently providing that
        # binary name in the target suites must be this same source package.
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1742
1743
1744
1745     def check_transition(self, session):
1746         cnf = Config()
1747
1748         sourcepkg = self.pkg.changes["source"]
1749
1750         # No sourceful upload -> no need to do anything else, direct return
1751         # We also work with unstable uploads, not experimental or those going to some
1752         # proposed-updates queue
1753         if "source" not in self.pkg.changes["architecture"] or \
1754            "unstable" not in self.pkg.changes["distribution"]:
1755             return
1756
1757         # Also only check if there is a file defined (and existant) with
1758         # checks.
1759         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1760         if transpath == "" or not os.path.exists(transpath):
1761             return
1762
1763         # Parse the yaml file
1764         sourcefile = file(transpath, 'r')
1765         sourcecontent = sourcefile.read()
1766         try:
1767             transitions = yaml.load(sourcecontent)
1768         except yaml.YAMLError, msg:
1769             # This shouldn't happen, there is a wrapper to edit the file which
1770             # checks it, but we prefer to be safe than ending up rejecting
1771             # everything.
1772             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1773             return
1774
1775         # Now look through all defined transitions
1776         for trans in transitions:
1777             t = transitions[trans]
1778             source = t["source"]
1779             expected = t["new"]
1780
1781             # Will be None if nothing is in testing.
1782             current = get_source_in_suite(source, "testing", session)
1783             if current is not None:
1784                 compare = apt_pkg.VersionCompare(current.version, expected)
1785
1786             if current is None or compare < 0:
1787                 # This is still valid, the current version in testing is older than
1788                 # the new version we wait for, or there is none in testing yet
1789
1790                 # Check if the source we look at is affected by this.
1791                 if sourcepkg in t['packages']:
1792                     # The source is affected, lets reject it.
1793
1794                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1795                         sourcepkg, trans)
1796
1797                     if current is not None:
1798                         currentlymsg = "at version %s" % (current.version)
1799                     else:
1800                         currentlymsg = "not present in testing"
1801
1802                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1803
1804                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1805 is part of a testing transition designed to get %s migrated (it is
1806 currently %s, we need version %s).  This transition is managed by the
1807 Release Team, and %s is the Release-Team member responsible for it.
1808 Please mail debian-release@lists.debian.org or contact %s directly if you
1809 need further assistance.  You might want to upload to experimental until this
1810 transition is done."""
1811                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1812
1813                     self.rejects.append(rejectmsg)
1814                     return
1815
1816     ###########################################################################
1817     # End check_signed_by_key checks
1818     ###########################################################################
1819
1820     def build_summaries(self):
1821         """ Build a summary of changes the upload introduces. """
1822
1823         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1824
1825         short_summary = summary
1826
1827         # This is for direport's benefit...
1828         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1829
1830         if byhand or new:
1831             summary += "Changes: " + f
1832
1833         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1834
1835         summary += self.announce(short_summary, 0)
1836
1837         return (summary, short_summary)
1838
1839     ###########################################################################
1840
1841     def close_bugs(self, summary, action):
1842         """
1843         Send mail to close bugs as instructed by the closes field in the changes file.
1844         Also add a line to summary if any work was done.
1845
1846         @type summary: string
1847         @param summary: summary text, as given by L{build_summaries}
1848
1849         @type action: bool
1850         @param action: Set to false no real action will be done.
1851
1852         @rtype: string
1853         @return: summary. If action was taken, extended by the list of closed bugs.
1854
1855         """
1856
1857         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1858
1859         bugs = self.pkg.changes["closes"].keys()
1860
1861         if not bugs:
1862             return summary
1863
1864         bugs.sort()
1865         summary += "Closing bugs: "
1866         for bug in bugs:
1867             summary += "%s " % (bug)
1868             if action:
1869                 self.update_subst()
1870                 self.Subst["__BUG_NUMBER__"] = bug
1871                 if self.pkg.changes["distribution"].has_key("stable"):
1872                     self.Subst["__STABLE_WARNING__"] = """
1873 Note that this package is not part of the released stable Debian
1874 distribution.  It may have dependencies on other unreleased software,
1875 or other instabilities.  Please take care if you wish to install it.
1876 The update will eventually make its way into the next released Debian
1877 distribution."""
1878                 else:
1879                     self.Subst["__STABLE_WARNING__"] = ""
1880                 mail_message = utils.TemplateSubst(self.Subst, template)
1881                 utils.send_mail(mail_message)
1882
1883                 # Clear up after ourselves
1884                 del self.Subst["__BUG_NUMBER__"]
1885                 del self.Subst["__STABLE_WARNING__"]
1886
1887         if action and self.logger:
1888             self.logger.log(["closing bugs"] + bugs)
1889
1890         summary += "\n"
1891
1892         return summary
1893
1894     ###########################################################################
1895
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: Textstring about action taken.

        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        # Remember which announce lists we've already mailed so a package
        # targeting several suites with the same list is announced once.
        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            suite = get_suite(dist)
            if suite is None: continue
            announce_list = suite.announce
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.update_subst()
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                # Bcc the package tracking system for sourceful uploads,
                # if a tracking server is configured.
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        # Optionally also close the bugs listed in the changes file.
        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
1953
1954     ###########################################################################
    @session_wrapper
    def accept (self, summary, short_summary, session=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        cnf = Config()
        stats = SummaryStats()

        print "Installing."
        self.logger.log(["installing changes", self.pkg.changes_file])

        # Pool files referenced by this upload; used later to populate
        # the per-suite copy queues.
        poolfiles = []

        # Add the .dsc file to the DB first
        # NOTE(review): source, dsc_component and dsc_location_id are only
        # bound when a dsc entry exists; the sourceful branch below relies
        # on that — confirm a sourceful changes always carries a dsc entry.
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
                for j in pfs:
                    poolfiles.append(j)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                poolfiles.append(add_deb_to_db(self, newfile, session))

        # If this is a sourceful diff only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        # XXX: mhy: I think this should be in add_dsc_to_db
        if self.pkg.changes["architecture"].has_key("source"):
            for orig_file in self.pkg.orig_files.keys():
                if not self.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = self.pkg.orig_files[orig_file]["id"]
                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Do the move
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))

                # TODO: Care about size/md5sum collisions etc
                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                if newf is None:
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)

                    session.flush()

                    # Don't reference the old file from this changes
                    # NOTE(review): this removes from poolfiles while
                    # iterating it; safe only if at most one entry matches —
                    # verify, or iterate over a copy.
                    for p in poolfiles:
                        if p.file_id == oldf.file_id:
                            poolfiles.remove(p)

                    poolfiles.append(newf)

                    # Fix up the DSC references
                    toremove = []

                    for df in source.srcfiles:
                        if df.poolfile.file_id == oldf.file_id:
                            # Add a new DSC entry and mark the old one for deletion
                            # Don't do it in the loop so we don't change the thing we're iterating over
                            newdscf = DSCFile()
                            newdscf.source_id = source.source_id
                            newdscf.poolfile_id = newf.file_id
                            session.add(newdscf)

                            toremove.append(df)

                    for df in toremove:
                        session.delete(df)

                    # Flush our changes
                    session.flush()

                    # Make sure that our source object is up-to-date
                    session.expire(source)

        # Add changelog information to the database
        self.store_changelog()

        # Install the files into the pool
        for newfile, entry in self.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            stats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suite which need it.
        # (dict used purely to de-duplicate destination paths)
        copy_changes = dict([(x.copychanges, '')
                             for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
                             if x.copychanges is not None])

        for dest in copy_changes.keys():
            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # the last commit

        # Move the .changes into the 'done' directory
        utils.move(self.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

        # Record the urgency for sourceful uploads if urgency logging is on.
        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])

        # Send the "accepted" mail and announce the upload.
        self.update_subst()
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst,
                                           os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            if self.pkg.changes["architecture"].has_key("source"):
                # Write the version history for the BTS; built in a temp
                # file then renamed (atomically) into place.
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        session.commit()

        # Set up our copy queues (e.g. buildd queues)
        for suite_name in self.pkg.changes["distribution"].keys():
            suite = get_suite(suite_name, session)
            for q in suite.copy_queues:
                for f in poolfiles:
                    q.add_file_from_pool(f)

        session.commit()

        # Finally...
        stats.accept_count += 1
2130
2131     def check_override(self):
2132         """
2133         Checks override entries for validity. Mails "Override disparity" warnings,
2134         if that feature is enabled.
2135
2136         Abandons the check if
2137           - override disparity checks are disabled
2138           - mail sending is disabled
2139         """
2140
2141         cnf = Config()
2142
2143         # Abandon the check if override disparity checks have been disabled
2144         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2145             return
2146
2147         summary = self.pkg.check_override()
2148
2149         if summary == "":
2150             return
2151
2152         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2153
2154         self.update_subst()
2155         self.Subst["__SUMMARY__"] = summary
2156         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2157         utils.send_mail(mail_message)
2158         del self.Subst["__SUMMARY__"]
2159
2160     ###########################################################################
2161
2162     def remove(self, from_dir=None):
2163         """
2164         Used (for instance) in p-u to remove the package from unchecked
2165
2166         Also removes the package from holding area.
2167         """
2168         if from_dir is None:
2169             from_dir = self.pkg.directory
2170         h = Holding()
2171
2172         for f in self.pkg.files.keys():
2173             os.unlink(os.path.join(from_dir, f))
2174             if os.path.exists(os.path.join(h.holding_dir, f)):
2175                 os.unlink(os.path.join(h.holding_dir, f))
2176
2177         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2178         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2179             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2180
2181     ###########################################################################
2182
2183     def move_to_queue (self, queue):
2184         """
2185         Move files to a destination queue using the permissions in the table
2186         """
2187         h = Holding()
2188         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2189                    queue.path, perms=int(queue.change_perms, 8))
2190         for f in self.pkg.files.keys():
2191             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2192
2193     ###########################################################################
2194
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                # O_EXCL claims the destination name atomically, so we never
                # silently clobber a file someone else put there.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    # Any error other than "already exists" is unexpected here.
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2242
2243     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type notes: list
        @param notes: note objects with author/version/notedate/comment
          attributes used to pre-fill the editor buffer; defaults to ""
          (no notes).

        @return: 0 on success; 1 if the operator abandoned the rejection.
          Calls sys.exit(0) if the operator chose to quit.

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            # Seed the editor buffer with any existing notes on the package.
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Re-open the editor until the operator Rejects, Abandons or Quits.
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        # Empty input selects the bracketed default ([R]eject).
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # <foo>.changes becomes <foo>.reason in the reject directory.
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            # Automated rejection: the reason file holds the raw message.
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        # Clean our entries out of the substitution dict so later mails
        # built from it don't inherit them.
        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
2341
2342     ################################################################################
2343     def in_override_p(self, package, component, suite, binary_type, filename, session):
2344         """
2345         Check if a package already has override entries in the DB
2346
2347         @type package: string
2348         @param package: package name
2349
2350         @type component: string
2351         @param component: database id of the component
2352
2353         @type suite: int
2354         @param suite: database id of the suite
2355
2356         @type binary_type: string
2357         @param binary_type: type of the package
2358
2359         @type filename: string
2360         @param filename: filename we check
2361
2362         @return: the database result. But noone cares anyway.
2363
2364         """
2365
2366         cnf = Config()
2367
2368         if binary_type == "": # must be source
2369             file_type = "dsc"
2370         else:
2371             file_type = binary_type
2372
2373         # Override suite name; used for example with proposed-updates
2374         oldsuite = get_suite(suite, session)
2375         if (not oldsuite is None) and oldsuite.overridesuite:
2376             suite = oldsuite.overridesuite
2377
2378         result = get_override(package, suite, component, file_type, session)
2379
2380         # If checking for a source package fall back on the binary override type
2381         if file_type == "dsc" and len(result) < 1:
2382             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2383
2384         # Remember the section and priority so we can check them later if appropriate
2385         if len(result) > 0:
2386             result = result[0]
2387             self.pkg.files[filename]["override section"] = result.section.section
2388             self.pkg.files[filename]["override priority"] = result.priority.priority
2389             return result
2390
2391         return None
2392
2393     ################################################################################
2394     def get_anyversion(self, sv_list, suite):
2395         """
2396         @type sv_list: list
2397         @param sv_list: list of (suite, version) tuples to check
2398
2399         @type suite: string
2400         @param suite: suite name
2401
2402         Description: TODO
2403         """
2404         Cnf = Config()
2405         anyversion = None
2406         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2407         for (s, v) in sv_list:
2408             if s in [ x.lower() for x in anysuite ]:
2409                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2410                     anyversion = v
2411
2412         return anyversion
2413
2414     ################################################################################
2415
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: file whose version is checked (used in messages)

        @type new_version: string
        @param new_version: version of the upload being checked

        @type sourceful: bool
        @param sourceful: whether the upload includes source; the
          "must be newer than" rule is only enforced for sourceful uploads

        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        Appends problems to C{self.rejects}/C{self.warnings} and may mark
        suites in C{self.pkg.changes["propdistribution"]} for propagation.
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            # Suites (lowercased) this upload must be strictly newer /
            # strictly older than, per the conffile.
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                # vercmp > 0: new is newer; == 0: equal; < 0: new is older.
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                # Not strictly newer than a "must be newer" suite -> reject.
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                # Not strictly older than a "must be older" suite: see if a
                # configured suite mapping lets us save the upload.
                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propogate!!
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2492
2493     ################################################################################
2494     def check_binary_against_db(self, filename, session):
2495         # Ensure version is sane
2496         q = session.query(BinAssociation)
2497         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2498         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2499
2500         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2501                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2502
2503         # Check for any existing copies of the file
2504         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2505         q = q.filter_by(version=self.pkg.files[filename]["version"])
2506         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2507
2508         if q.count() > 0:
2509             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2510
2511     ################################################################################
2512
2513     def check_source_against_db(self, filename, session):
2514         source = self.pkg.dsc.get("source")
2515         version = self.pkg.dsc.get("version")
2516
2517         # Ensure version is sane
2518         q = session.query(SrcAssociation)
2519         q = q.join(DBSource).filter(DBSource.source==source)
2520
2521         self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2522                                        filename, version, sourceful=True)
2523
2524     ################################################################################
2525     def check_dsc_against_db(self, filename, session):
2526         """
2527
2528         @warning: NB: this function can remove entries from the 'files' index [if
2529          the orig tarball is a duplicate of the one in the archive]; if
2530          you're iterating over 'files' and call this function as part of
2531          the loop, be sure to add a check to the top of the loop to
2532          ensure you haven't just tried to dereference the deleted entry.
2533
2534         """
2535
2536         Cnf = Config()
2537         self.pkg.orig_files = {} # XXX: do we need to clear it?
2538         orig_files = self.pkg.orig_files
2539
2540         # Try and find all files mentioned in the .dsc.  This has
2541         # to work harder to cope with the multiple possible
2542         # locations of an .orig.tar.gz.
2543         # The ordering on the select is needed to pick the newest orig
2544         # when it exists in multiple places.
2545         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2546             found = None
2547             if self.pkg.files.has_key(dsc_name):
2548                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2549                 actual_size = int(self.pkg.files[dsc_name]["size"])
2550                 found = "%s in incoming" % (dsc_name)
2551
2552                 # Check the file does not already exist in the archive
2553                 ql = get_poolfile_like_name(dsc_name, session)
2554
2555                 # Strip out anything that isn't '%s' or '/%s$'
2556                 for i in ql:
2557                     if not i.filename.endswith(dsc_name):
2558                         ql.remove(i)
2559
2560                 # "[dak] has not broken them.  [dak] has fixed a
2561                 # brokenness.  Your crappy hack exploited a bug in
2562                 # the old dinstall.
2563                 #
2564                 # "(Come on!  I thought it was always obvious that
2565                 # one just doesn't release different files with
2566                 # the same name and version.)"
2567                 #                        -- ajk@ on d-devel@l.d.o
2568
2569                 if len(ql) > 0:
2570                     # Ignore exact matches for .orig.tar.gz
2571                     match = 0
2572                     if re_is_orig_source.match(dsc_name):
2573                         for i in ql:
2574                             if self.pkg.files.has_key(dsc_name) and \
2575                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2576                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2577                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2578                                 # TODO: Don't delete the entry, just mark it as not needed
2579                                 # This would fix the stupidity of changing something we often iterate over
2580                                 # whilst we're doing it
2581                                 del self.pkg.files[dsc_name]
2582                                 dsc_entry["files id"] = i.file_id
2583                                 if not orig_files.has_key(dsc_name):
2584                                     orig_files[dsc_name] = {}
2585                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2586                                 match = 1
2587
2588                                 # Don't bitch that we couldn't find this file later
2589                                 try:
2590                                     self.later_check_files.remove(dsc_name)
2591                                 except ValueError:
2592                                     pass
2593
2594
2595                     if not match:
2596                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2597
2598             elif re_is_orig_source.match(dsc_name):
2599                 # Check in the pool
2600                 ql = get_poolfile_like_name(dsc_name, session)
2601
2602                 # Strip out anything that isn't '%s' or '/%s$'
2603                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2604                 for i in ql:
2605                     if not i.filename.endswith(dsc_name):
2606                         ql.remove(i)
2607
2608                 if len(ql) > 0:
2609                     # Unfortunately, we may get more than one match here if,
2610                     # for example, the package was in potato but had an -sa
2611                     # upload in woody.  So we need to choose the right one.
2612
2613                     # default to something sane in case we don't match any or have only one
2614                     x = ql[0]
2615
2616                     if len(ql) > 1:
2617                         for i in ql:
2618                             old_file = os.path.join(i.location.path, i.filename)
2619                             old_file_fh = utils.open_file(old_file)
2620                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2621                             old_file_fh.close()
2622                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2623                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2624                                 x = i
2625
2626                     old_file = os.path.join(i.location.path, i.filename)
2627                     old_file_fh = utils.open_file(old_file)
2628                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2629                     old_file_fh.close()
2630                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2631                     found = old_file
2632                     suite_type = x.location.archive_type
2633                     # need this for updating dsc_files in install()
2634                     dsc_entry["files id"] = x.file_id
2635                     # See install() in process-accepted...
2636                     if not orig_files.has_key(dsc_name):
2637                         orig_files[dsc_name] = {}
2638                     orig_files[dsc_name]["id"] = x.file_id
2639                     orig_files[dsc_name]["path"] = old_file
2640                     orig_files[dsc_name]["location"] = x.location.location_id
2641                 else:
2642                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2643                     # Not there? Check the queue directories...
2644                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2645                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2646                             continue
2647                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2648                         if os.path.exists(in_otherdir):
2649                             in_otherdir_fh = utils.open_file(in_otherdir)
2650                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2651                             in_otherdir_fh.close()
2652                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2653                             found = in_otherdir
2654                             if not orig_files.has_key(dsc_name):
2655                                 orig_files[dsc_name] = {}
2656                             orig_files[dsc_name]["path"] = in_otherdir
2657
2658                     if not found:
2659                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2660                         continue
2661             else:
2662                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2663                 continue
2664             if actual_md5 != dsc_entry["md5sum"]:
2665                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2666             if actual_size != int(dsc_entry["size"]):
2667                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2668
2669     ################################################################################
2670     # This is used by process-new and process-holding to recheck a changes file
2671     # at the time we're running.  It mainly wraps various other internal functions
2672     # and is similar to accepted_checks - these should probably be tidied up
2673     # and combined
2674     def recheck(self, session):
2675         cnf = Config()
2676         for f in self.pkg.files.keys():
2677             # The .orig.tar.gz can disappear out from under us is it's a
2678             # duplicate of one in the archive.
2679             if not self.pkg.files.has_key(f):
2680                 continue
2681
2682             entry = self.pkg.files[f]
2683
2684             # Check that the source still exists
2685             if entry["type"] == "deb":
2686                 source_version = entry["source version"]
2687                 source_package = entry["source package"]
2688                 if not self.pkg.changes["architecture"].has_key("source") \
2689                    and not source_exists(source_package, source_version, \
2690                     suites = self.pkg.changes["distribution"].keys(), session = session):
2691                     source_epochless_version = re_no_epoch.sub('', source_version)
2692                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2693                     found = False
2694                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2695                         if cnf.has_key("Dir::Queue::%s" % (q)):
2696                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2697                                 found = True
2698                     if not found:
2699                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2700
2701             # Version and file overwrite checks
2702             if entry["type"] == "deb":
2703                 self.check_binary_against_db(f, session)
2704             elif entry["type"] == "dsc":
2705                 self.check_source_against_db(f, session)
2706                 self.check_dsc_against_db(f, session)
2707
2708     ################################################################################
2709     def accepted_checks(self, overwrite_checks, session):
2710         # Recheck anything that relies on the database; since that's not
2711         # frozen between accept and our run time when called from p-a.
2712
2713         # overwrite_checks is set to False when installing to stable/oldstable
2714
2715         propogate={}
2716         nopropogate={}
2717
2718         # Find the .dsc (again)
2719         dsc_filename = None
2720         for f in self.pkg.files.keys():
2721             if self.pkg.files[f]["type"] == "dsc":
2722                 dsc_filename = f
2723
2724         for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
2727             if not self.pkg.files.has_key(checkfile):
2728                 continue
2729
2730             entry = self.pkg.files[checkfile]
2731
2732             # Check that the source still exists
2733             if entry["type"] == "deb":
2734                 source_version = entry["source version"]
2735                 source_package = entry["source package"]
2736                 if not self.pkg.changes["architecture"].has_key("source") \
2737                    and not source_exists(source_package, source_version, \
2738                     suites = self.pkg.changes["distribution"].keys(), \
2739                     session = session):
2740                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2741
2742             # Version and file overwrite checks
2743             if overwrite_checks:
2744                 if entry["type"] == "deb":
2745                     self.check_binary_against_db(checkfile, session)
2746                 elif entry["type"] == "dsc":
2747                     self.check_source_against_db(checkfile, session)
2748                     self.check_dsc_against_db(dsc_filename, session)
2749
2750             # propogate in the case it is in the override tables:
2751             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2752                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2753                     propogate[suite] = 1
2754                 else:
2755                     nopropogate[suite] = 1
2756
2757         for suite in propogate.keys():
2758             if suite in nopropogate:
2759                 continue
2760             self.pkg.changes["distribution"][suite] = 1
2761
2762         for checkfile in self.pkg.files.keys():
2763             # Check the package is still in the override tables
2764             for suite in self.pkg.changes["distribution"].keys():
2765                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2766                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2767
2768     ################################################################################
2769     # If any file of an upload has a recent mtime then chances are good
2770     # the file is still being uploaded.
2771
2772     def upload_too_new(self):
2773         cnf = Config()
2774         too_new = False
2775         # Move back to the original directory to get accurate time stamps
2776         cwd = os.getcwd()
2777         os.chdir(self.pkg.directory)
2778         file_list = self.pkg.files.keys()
2779         file_list.extend(self.pkg.dsc_files.keys())
2780         file_list.append(self.pkg.changes_file)
2781         for f in file_list:
2782             try:
2783                 last_modified = time.time()-os.path.getmtime(f)
2784                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2785                     too_new = True
2786                     break
2787             except:
2788                 pass
2789
2790         os.chdir(cwd)
2791         return too_new
2792
2793     def store_changelog(self):
2794
2795         # Skip binary-only upload if it is not a bin-NMU
2796         if not self.pkg.changes['architecture'].has_key('source'):
2797             from daklib.regexes import re_bin_only_nmu
2798             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2799                 return
2800
2801         session = DBConn().session()
2802
2803         # Check if upload already has a changelog entry
2804         query = """SELECT changelog_id FROM changes WHERE source = :source
2805                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2806         if session.execute(query, {'source': self.pkg.changes['source'], \
2807                                    'version': self.pkg.changes['version'], \
2808                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2809             session.commit()
2810             return
2811
2812         # Add current changelog text into changelogs_text table, return created ID
2813         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2814         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2815
2816         # Link ID to the upload available in changes table
2817         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2818                    AND version = :version AND architecture = :architecture"""
2819         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2820                                 'version': self.pkg.changes['version'], \
2821                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2822
2823         session.commit()