#!/usr/bin/env python
# vim:set et sw=4:

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################

def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype

    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type

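# Usage sketch (illustrative only; the entry dict below is a minimal made-up
# example of a files entry as produced by parsing a .changes file):
#
#   session = DBConn().session()
#   entry = {"type": "dsc"}                # no "dbtype" key, so "type" is examined
#   file_type = get_type(entry, session)   # -> "dsc"
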
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(filename, changes, files, warn=1, session = None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type filename: str
    @param filename: changes filename

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: tuple
    @return: (new, byhand) dictionaries of NEW components and byhand files.

    """
    # TODO: This should all use the database instead of parsing the changes
    # file again
    new = {}
    byhand = {}

    dbchg = get_dbchange(filename, session)
    if dbchg is None:
        print "Warning: cannot find changes file in database; won't check byhand"

    # Build up a list of potentially new things
    for name, f in files.items():
        # Keep a record of byhand elements
        if f["section"] == "byhand":
            byhand[name] = 1
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        oldsuite = get_suite(suite, session)
        if not oldsuite:
            print "WARNING: Invalid suite %s found" % suite
            continue

        if oldsuite.overridesuite:
            newsuite = get_suite(oldsuite.overridesuite, session)

            if newsuite:
                print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
                    oldsuite.overridesuite, suite)
                del changes["suite"][suite]
                changes["suite"][oldsuite.overridesuite] = 1
            else:
                print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
                    oldsuite.overridesuite, suite)

    # Check for unprocessed byhand files
    if dbchg is not None:
        for b in byhand.keys():
            # Find the file entry in the database
            found = False
            for f in dbchg.files:
                if f.filename == b:
                    found = True
                    # If it's processed, we can ignore it
                    if f.processed:
                        del byhand[b]
                    break

            if not found:
                print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)

    # Check for new stuff
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new, byhand

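# Usage sketch (illustrative; assumes upload.pkg has already been populated
# by Upload.load_changes() and Upload.check_files()):
#
#   session = DBConn().session()
#   new, byhand = determine_new(upload.pkg.changes_file, upload.pkg.changes,
#                               upload.pkg.files, warn=1, session=session)
#   for pkg in new.keys():
#       print "%s is NEW (%s/%s)" % (pkg, new[pkg]["component"], new[pkg]["section"])
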
################################################################################

def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1

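# Usage sketch (illustrative; "new" as returned by determine_new() above).
# check_valid() mutates the dict in place, flagging bad entries with -1 ids:
#
#   check_valid(new, session)
#   for pkg in new.keys():
#       if new[pkg]["section id"] == -1:
#           print "%s: invalid section %s" % (pkg, new[pkg]["section"])
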
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime

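# Usage sketch (illustrative; apt_inst's tar walker invokes the callback once
# per archive member with these positional arguments):
#
#   cutoff = int(time.time())
#   tartime = TarTime(future_cutoff = cutoff + 24 * 3600,
#                     past_cutoff = cutoff - 10 * 365 * 24 * 3600)
#   # ... pass tartime.callback to the tar extraction, then:
#   for name, mtime in tartime.future_files.items():
#       print "%s has a timestamp in the future (%s)" % (name, mtime)
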
###############################################################################

def prod_maintainer(notes, upload):
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"

################################################################################

def edit_note(note, upload, session, trainee=False):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()

###############################################################################

# suite names DMs can upload to
dm_suites = ['unstable', 'experimental']

def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()

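# Usage sketch (illustrative; 'hello' is a made-up source package name):
#
#   session = DBConn().session()
#   src = get_newest_source('hello', session)
#   if src is not None:
#       print src.version        # newest version in unstable/experimental
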
def get_suite_version(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    q = session.query(Suite.suite_name, DBSource.version). \
        join(Suite.sources).filter_by(source = source)
    return q.all()

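# Usage sketch (illustrative):
#
#   for suite_name, version in get_suite_version('hello', session):
#       print "%s: %s" % (suite_name, version)
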
def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    return session.query(DBSource). \
        join(DBSource.binaries).filter_by(package = package). \
        join(DBBinary.suites).filter_by(suite_name = suite_name)

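# Usage sketch (illustrative; note the function returns a query object, so
# callers may refine it further before evaluating it):
#
#   q = get_source_by_package_and_suite('libhello0', 'unstable', session)
#   for src in q.all():
#       print src.source, src.version
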
class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])

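    # Usage sketch (illustrative; assumes a template using markers such as
    # __SOURCE__ and __VERSION__, as consumed by utils.TemplateSubst()):
    #
    #   cnf = Config()
    #   upload.update_subst()
    #   mail = utils.TemplateSubst(upload.Subst,
    #                              cnf["Dir::Templates"] + "/some.template")
    #   utils.send_mail(mail)
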
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

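    # Usage sketch (illustrative; a False return means the file could not even
    # be parsed, while self.rejects may accumulate entries either way):
    #
    #   upload = Upload()
    #   if not upload.load_changes("/srv/queue/foo_1.0-1_amd64.changes"):
    #       print "unparseable changes file"
    #   for r in upload.rejects:
    #       print r
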
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))

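    # Illustrative SuiteMappings configuration entries (apt-style syntax
    # sketch; the suite names here are examples only):
    #
    #   SuiteMappings
    #   {
    #     "map stable proposed-updates";
    #     "silent-map stable-security proposed-updates";
    #     "ignore testing";
    #     "reject experimental-security";
    #     "propup-version stable-security testing testing-proposed-updates unstable";
    #   };
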
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if     control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existing source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, suites = \
                self.pkg.changes["distribution"].keys(), session = session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)

    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"

    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]

    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")

    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited set of source formats is allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                 % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        dbchg = get_dbchange(self.pkg.changes_file, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            print 'XXX: %s' % f
            # Check if we've already processed this file if we have a dbchg object
            ok = False
            if dbchg:
                for pf in dbchg.files:
                    if pf.filename == f and pf.processed:
                        self.notes.append('%s was already processed so we can go ahead' % f)
                        ok = True
                        del self.pkg.files[f]
            if not ok:
                self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True

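    # Usage sketch (illustrative; the usual check ordering as driven by dak's
    # upload-processing commands):
    #
    #   upload = Upload()
    #   if upload.load_changes(changes_filename):
    #       upload.check_distributions()
    #       upload.check_files(action=False)
    #       upload.check_dsc(action=False)
    #   print upload.package_info()
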
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
1286                 print "removal of tmpdir %s failed: %s" % (tmpdir, e)
1287                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1288
1289             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1290             # We probably have u-r or u-w directories so chmod everything
1291             # and try again.
1292             cmd = "chmod -R u+rwx %s" % (tmpdir)
1293             result = os.system(cmd)
1294             if result != 0:
1295                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1296             shutil.rmtree(tmpdir)
1297         except Exception, e:
1298             print "unexpected exception removing %s: %s" % (tmpdir, e)
1299             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1300
1301     ###########################################################################
1302     def ensure_hashes(self):
1303         # Make sure we recognise the format of the Files: field in the .changes
1304         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1305         if len(format) == 2:
1306             format = int(format[0]), int(format[1])
1307         else:
1308             format = int(float(format[0])), 0
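        # Illustrative: "Format: 1.8" parses to (1, 8) above, while a
        # missing Format field falls back to "0.0", i.e. (0, 0).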
1309
1310         # We need to deal with the original changes blob, as the fields we need
1311         # might not be in the changes dict serialised into the .dak anymore.
1312         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1313
1314         # Copy the checksums over to the current changes dict.  This will keep
1315         # the existing modifications to it intact.
1316         for field in orig_changes:
1317             if field.startswith('checksums-'):
1318                 self.pkg.changes[field] = orig_changes[field]
1319
1320         # Check for unsupported hashes
1321         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1322             self.rejects.append(j)
1323
1324         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1325             self.rejects.append(j)
1326
1327         # If the changes format predates a given hash, we have to calculate
1328         # that hash ourselves rather than require it to exist in the changes file
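        # utils.known_hashes is assumed to hold (name, function,
        # (major, minor)) tuples, e.g. ("sha256", apt_pkg.sha256sum, (1, 8))
        # for a hash that only appears from changes format 1.8 onwards
        # (illustrative entry).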
1329         for hashname, hashfunc, version in utils.known_hashes:
1330             # TODO: Move _ensure_changes_hash into this class
1331             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1332                 self.rejects.append(j)
1333             if "source" in self.pkg.changes["architecture"]:
1334                 # TODO: Move _ensure_dsc_hash into this class
1335                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1336                     self.rejects.append(j)
1337
1338     def check_hashes(self):
1339         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1340             self.rejects.append(m)
1341
1342         for m in utils.check_size(".changes", self.pkg.files):
1343             self.rejects.append(m)
1344
1345         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1346             self.rejects.append(m)
1347
1348         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1349             self.rejects.append(m)
1350
1351         self.ensure_hashes()
1352
1353     ###########################################################################
1354
1355     def ensure_orig(self, target_dir='.', session=None):
1356         """
1357         Ensures that all orig files mentioned in the changes file are present
1358         in target_dir. If they do not exist, they are symlinked into place.
1359
1360         A list containing the symlinks that were created is returned (so they
1361         can be removed).
1362         """
1363
1364         symlinked = []
1365         cnf = Config()
1366
1367         for filename, entry in self.pkg.dsc_files.iteritems():
1368             if not re_is_orig_source.match(filename):
1369                 # File is not an orig; ignore
1370                 continue
1371
1372             if os.path.exists(filename):
1373                 # File exists, no need to continue
1374                 continue
1375
1376             def symlink_if_valid(path):
1377                 f = utils.open_file(path)
1378                 md5sum = apt_pkg.md5sum(f)
1379                 f.close()
1380
1381                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1382                 expected = (int(entry['size']), entry['md5sum'])
1383
1384                 if fingerprint != expected:
1385                     return False
1386
1387                 dest = os.path.join(target_dir, filename)
1388
1389                 os.symlink(path, dest)
1390                 symlinked.append(dest)
1391
1392                 return True
1393
1394             session_ = session
1395             if session is None:
1396                 session_ = DBConn().session()
1397
1398             found = False
1399
1400             # Look in the pool
1401             for poolfile in get_poolfile_like_name(filename, session_):
1402                 poolfile_path = os.path.join(
1403                     poolfile.location.path, poolfile.filename
1404                 )
1405
1406                 if symlink_if_valid(poolfile_path):
1407                     found = True
1408                     break
1409
1410             if session is None:
1411                 session_.close()
1412
1413             if found:
1414                 continue
1415
1416             # Look in some other queues for the file
1417             queues = ('New', 'Byhand', 'ProposedUpdates',
1418                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1419
1420             for queue in queues:
1421                 if not cnf.get('Dir::Queue::%s' % queue):
1422                     continue
1423
1424                 queuefile_path = os.path.join(
1425                     cnf['Dir::Queue::%s' % queue], filename
1426                 )
1427
1428                 if not os.path.exists(queuefile_path):
1429                     # Does not exist in this queue
1430                     continue
1431
1432                 if symlink_if_valid(queuefile_path):
1433                     break
1434
1435         return symlinked
1436
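    # Usage sketch (illustrative, not part of the call chain): callers of
    # ensure_orig() are expected to remove the returned symlinks
    # themselves, e.g.
    #
    #   symlinked = upload.ensure_orig()
    #   try:
    #       run_checks_needing_origs()   # hypothetical helper
    #   finally:
    #       for link in symlinked:
    #           os.unlink(link)
    #
    # check_lintian() below follows exactly this pattern.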
1437     ###########################################################################
1438
1439     def check_lintian(self):
1440         """
1441         Extends self.rejects by checking the output of lintian against tags
1442         specified in Dinstall::LintianTags.
1443         """
1444
1445         cnf = Config()
1446
1447         # Don't reject binary uploads
1448         if not self.pkg.changes['architecture'].has_key('source'):
1449             return
1450
1451         # Only check some distributions
1452         for dist in ('unstable', 'experimental'):
1453             if dist in self.pkg.changes['distribution']:
1454                 break
1455         else:
1456             return
1457
1458         # If we do not have a tagfile, don't do anything
1459         tagfile = cnf.get("Dinstall::LintianTags")
1460         if tagfile is None:
1461             return
1462
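        # The tag file is assumed to be YAML with a 'lintian' key mapping
        # category names to lists of tags, along these lines (category
        # and tag names invented):
        #
        #   lintian:
        #     fatal:
        #       - some-serious-tag
        #     nonfatal:
        #       - some-lesser-tag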
1463         # Parse the yaml file
1464         sourcefile = file(tagfile, 'r')
1465         sourcecontent = sourcefile.read()
1466         sourcefile.close()
1467
1468         try:
1469             lintiantags = yaml.load(sourcecontent)['lintian']
1470         except yaml.YAMLError, msg:
1471             utils.fubar("Cannot read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1472             return
1473
1474         # Try and find all orig mentioned in the .dsc
1475         symlinked = self.ensure_orig()
1476
1477         # Setup the input file for lintian
1478         fd, temp_filename = utils.temp_filename()
1479         temptagfile = os.fdopen(fd, 'w')
1480         for tags in lintiantags.values():
1481             temptagfile.writelines(['%s\n' % x for x in tags])
1482         temptagfile.close()
1483
1484         try:
1485             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1486                 (temp_filename, self.pkg.changes_file)
1487
1488             result, output = commands.getstatusoutput(cmd)
1489         finally:
1490             # Remove our tempfile and any symlinks we created
1491             os.unlink(temp_filename)
1492
1493             for symlink in symlinked:
1494                 os.unlink(symlink)
1495
1496         if result == 2:
1497             utils.warn("lintian failed for %s [return code: %s]." % \
1498                 (self.pkg.changes_file, result))
1499             utils.warn(utils.prefix_multi_line_string(output, \
1500                 " [possible output:] "))
1501
1502         def log(*txt):
1503             if self.logger:
1504                 self.logger.log(
1505                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1506                 )
1507
1508         # Generate messages
1509         parsed_tags = parse_lintian_output(output)
1510         self.rejects.extend(
1511             generate_reject_messages(parsed_tags, lintiantags, log=log)
1512         )
1513
1514     ###########################################################################
1515     def check_urgency(self):
1516         cnf = Config()
1517         if self.pkg.changes["architecture"].has_key("source"):
1518             if not self.pkg.changes.has_key("urgency"):
1519                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1520             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1521             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1522                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1523                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1524                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1525
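    # Illustrative behaviour of check_urgency (config values assumed):
    # with Urgency::Valid = "low medium high" and Urgency::Default = "low",
    # an urgency of "HIGH" is lowercased and kept, while "superhigh"
    # triggers a warning and is replaced by "low".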
1526     ###########################################################################
1527
1528     # Sanity check the time stamps of files inside debs.
1529     # [Files in the near future cause ugly warnings and extreme time
1530     #  travel can cause errors on extraction]
1531
1532     def check_timestamps(self):
1533         Cnf = Config()
1534
1535         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1536         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
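        # Illustrative: with Dinstall::PastCutoffYear = "1975" the past
        # cutoff is 00:00 on 1 Jan 1975, and a FutureTimeTravelGrace of
        # 86400 (assumed value) tolerates timestamps up to a day ahead.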
1537         tar = TarTime(future_cutoff, past_cutoff)
1538
1539         for filename, entry in self.pkg.files.items():
1540             if entry["type"] == "deb":
1541                 tar.reset()
1542                 try:
1543                     deb_file = utils.open_file(filename)
1544                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1545                     deb_file.seek(0)
1546                     try:
1547                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1548                     except SystemError, e:
1549                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1550                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1551                             raise
1552                         deb_file.seek(0)
1553                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")
1554
1555                     deb_file.close()
1556
1557                     future_files = tar.future_files.keys()
1558                     if future_files:
1559                         num_future_files = len(future_files)
1560                         future_file = future_files[0]
1561                         future_date = tar.future_files[future_file]
1562                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1563                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1564
1565                     ancient_files = tar.ancient_files.keys()
1566                     if ancient_files:
1567                         num_ancient_files = len(ancient_files)
1568                         ancient_file = ancient_files[0]
1569                         ancient_date = tar.ancient_files[ancient_file]
1570                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1571                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1572                 except:
1573                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1574
1575     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1576         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1577             sponsored = False
1578         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1579             sponsored = False
1580             if uid_name == "":
1581                 sponsored = True
1582         else:
1583             sponsored = True
1584             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1585                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1586                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1587                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1588                         self.pkg.changes["sponsoremail"] = uid_email
1589
1590         return sponsored
1591
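    # Illustrative outcomes of check_if_upload_is_sponsored (addresses
    # invented): if the signing key's uid is jane@example.org and
    # Maintainer: or Changed-By: carries that address, the upload is not
    # sponsored; if mentor@example.org signs jane's upload instead, it is
    # sponsored and, for sourceful uploads signed with an email alias,
    # the sponsor's address is recorded as "sponsoremail".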
1592
1593     ###########################################################################
1594     # check_signed_by_key checks
1595     ###########################################################################
1596
1597     def check_signed_by_key(self):
1598         """Ensure the .changes is signed by an authorized uploader."""
1599         session = DBConn().session()
1600
1601         # First of all we check that the person has proper upload permissions
1602         # and that this upload isn't blocked
1603         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1604
1605         if fpr is None:
1606             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1607             return
1608
1609         # TODO: Check that import-keyring adds UIDs properly
1610         if not fpr.uid:
1611             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1612             return
1613
1614         # Check that the fingerprint which uploaded has permission to do so
1615         self.check_upload_permissions(fpr, session)
1616
1617         # Check that this package is not in a transition
1618         self.check_transition(session)
1619
1620         session.close()
1621
1622
1623     def check_upload_permissions(self, fpr, session):
1624         # Check any one-off upload blocks
1625         self.check_upload_blocks(fpr, session)
1626
1627         # Start with DM as a special case
1628         # DM is a special case unfortunately, so we check it first
1629         # (keys with no source access get more access than DMs in one
1630         #  way; DMs can only upload for their packages whether source
1631         #  or binary, whereas keys with no access might be able to
1632         #  upload some binaries)
1633         if fpr.source_acl.access_level == 'dm':
1634             self.check_dm_upload(fpr, session)
1635         else:
1636             # Check source-based permissions for other types
1637             if self.pkg.changes["architecture"].has_key("source") and \
1638                 fpr.source_acl.access_level is None:
1639                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1640                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1641                 self.rejects.append(rej)
1642                 return
1643             # If not a DM, we allow full upload rights
1644             uid_email = "%s@debian.org" % (fpr.uid.uid)
1645             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1646
1647
1648         # Check binary upload permissions
1649         # By this point we know that DMs can't have got here unless they
1650         # are allowed to deal with the package concerned so just apply
1651         # normal checks
1652         if fpr.binary_acl.access_level == 'full':
1653             return
1654
1655         # Otherwise we're in the map case
1656         tmparches = self.pkg.changes["architecture"].copy()
1657         tmparches.pop('source', None)
1658
1659         for bam in fpr.binary_acl_map:
1660             tmparches.pop(bam.architecture.arch_string, None)
1661
1662         if len(tmparches.keys()) > 0:
1663             if fpr.binary_reject:
1664                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1665                 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1666                 self.rejects.append(rej)
1667             else:
1668                 # TODO: This is where we'll implement reject vs throw away binaries later
1669                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1670                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1671                 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1672                 self.rejects.append(rej)
1673
1674
1675     def check_upload_blocks(self, fpr, session):
1676         """Check whether any upload blocks apply to this source, source
1677            version, uid / fpr combination"""
1678
1679         def block_rej_template(fb):
1680             rej = 'Manual upload block in place for package %s' % fb.source
1681             if fb.version is not None:
1682                 rej += ', version %s' % fb.version
1683             return rej
1684
1685         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1686             # version is None if the block applies to all versions
1687             if fb.version is None or fb.version == self.pkg.changes['version']:
1688                 # Check both fpr and uid - either is enough to cause a reject
1689                 if fb.fpr is not None:
1690                     if fb.fpr.fingerprint == fpr.fingerprint:
1691                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1692                 if fb.uid is not None:
1693                     if fb.uid == fpr.uid:
1694                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1695
1696
1697     def check_dm_upload(self, fpr, session):
1698         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1699         ## none of the uploaded packages are NEW
1700         rej = False
1701         for f in self.pkg.files.keys():
1702             if self.pkg.files[f].has_key("byhand"):
1703                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1704                 rej = True
1705             if self.pkg.files[f].has_key("new"):
1706                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1707                 rej = True
1708
1709         if rej:
1710             return
1711
1712         r = get_newest_source(self.pkg.changes["source"], session)
1713
1714         if r is None:
1715             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1716             self.rejects.append(rej)
1717             return
1718
1719         if not r.dm_upload_allowed:
1720             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1721             self.rejects.append(rej)
1722             return
1723
1724         ## the Maintainer: field of the uploaded .changes file corresponds with
1725         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1726         ## uploads)
1727         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1728             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1729
1730         ## the most recent version of the package uploaded to unstable or
1731         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1732         ## non-developer maintainers cannot NMU or hijack packages)
1733
1734         # srcuploaders includes the maintainer
1735         accept = False
1736         for sup in r.srcuploaders:
1737             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1738             # Eww - I hope we never have two people with the same name in Debian
1739             if email == fpr.uid.uid or name == fpr.uid.name:
1740                 accept = True
1741                 break
1742
1743         if not accept:
1744             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1745             return
1746
1747         ## none of the packages are being taken over from other source packages
1748         for b in self.pkg.changes["binary"].keys():
1749             for suite in self.pkg.changes["distribution"].keys():
1750                 for s in get_source_by_package_and_suite(b, suite, session):
1751                     if s.source != self.pkg.changes["source"]:
1752                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1753
1754
1755
1756     def check_transition(self, session):
1757         cnf = Config()
1758
1759         sourcepkg = self.pkg.changes["source"]
1760
1761         # No sourceful upload -> no need to do anything else, direct return
1762         # We also work with unstable uploads, not experimental or those going to some
1763         # proposed-updates queue
1764         if "source" not in self.pkg.changes["architecture"] or \
1765            "unstable" not in self.pkg.changes["distribution"]:
1766             return
1767
1768         # Also only check if there is a file defined (and existent) with
1769         # checks.
1770         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1771         if transpath == "" or not os.path.exists(transpath):
1772             return
1773
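        # The transitions file is assumed to be YAML along these lines
        # (names and versions invented):
        #
        #   example-transition:
        #     source: libexample
        #     new: 2.0-1
        #     rm: "Some Release-Team Member"
        #     reason: "library transition"
        #     packages:
        #       - libexample
        #       - example-frontend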
1774         # Parse the yaml file
1775         sourcefile = file(transpath, 'r')
1776         sourcecontent = sourcefile.read()
1777         try:
1778             transitions = yaml.load(sourcecontent)
1779         except yaml.YAMLError, msg:
1780             # This shouldn't happen, there is a wrapper to edit the file which
1781             # checks it, but we'd rather be safe than end up rejecting
1782             # everything.
1783             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1784             return
1785
1786         # Now look through all defined transitions
1787         for trans in transitions:
1788             t = transitions[trans]
1789             source = t["source"]
1790             expected = t["new"]
1791
1792             # Will be None if nothing is in testing.
1793             current = get_source_in_suite(source, "testing", session)
1794             if current is not None:
1795                 compare = apt_pkg.VersionCompare(current.version, expected)
1796
1797             if current is None or compare < 0:
1798                 # This is still valid, the current version in testing is older than
1799                 # the new version we wait for, or there is none in testing yet
1800
1801                 # Check if the source we look at is affected by this.
1802                 if sourcepkg in t['packages']:
1803                     # The source is affected, let's reject it.
1804
1805                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1806                         sourcepkg, trans)
1807
1808                     if current is not None:
1809                         currentlymsg = "at version %s" % (current.version)
1810                     else:
1811                         currentlymsg = "not present in testing"
1812
1813                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1814
1815                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1816 is part of a testing transition designed to get %s migrated (it is
1817 currently %s, we need version %s).  This transition is managed by the
1818 Release Team, and %s is the Release-Team member responsible for it.
1819 Please mail debian-release@lists.debian.org or contact %s directly if you
1820 need further assistance.  You might want to upload to experimental until this
1821 transition is done."""
1822                             % (source, currentlymsg, expected, t["rm"], t["rm"])))
1823
1824                     self.rejects.append(rejectmsg)
1825                     return
1826
1827     ###########################################################################
1828     # End check_signed_by_key checks
1829     ###########################################################################
1830
1831     def build_summaries(self):
1832         """ Build a summary of changes the upload introduces. """
1833
1834         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1835
1836         short_summary = summary
1837
1838         # This is for direport's benefit...
1839         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1840
1841         if byhand or new:
1842             summary += "Changes: " + f
1843
1844         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1845
1846         summary += self.announce(short_summary, 0)
1847
1848         return (summary, short_summary)
1849
1850     ###########################################################################
1851
1852     def close_bugs(self, summary, action):
1853         """
1854         Send mail to close bugs as instructed by the closes field in the changes file.
1855         Also add a line to summary if any work was done.
1856
1857         @type summary: string
1858         @param summary: summary text, as given by L{build_summaries}
1859
1860         @type action: bool
1861         @param action: If set to false, no real action will be done.
1862
1863         @rtype: string
1864         @return: summary. If action was taken, extended by the list of closed bugs.
1865
1866         """
1867
1868         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1869
1870         bugs = self.pkg.changes["closes"].keys()
1871
1872         if not bugs:
1873             return summary
1874
1875         bugs.sort()
1876         summary += "Closing bugs: "
1877         for bug in bugs:
1878             summary += "%s " % (bug)
1879             if action:
1880                 self.update_subst()
1881                 self.Subst["__BUG_NUMBER__"] = bug
1882                 if self.pkg.changes["distribution"].has_key("stable"):
1883                     self.Subst["__STABLE_WARNING__"] = """
1884 Note that this package is not part of the released stable Debian
1885 distribution.  It may have dependencies on other unreleased software,
1886 or other instabilities.  Please take care if you wish to install it.
1887 The update will eventually make its way into the next released Debian
1888 distribution."""
1889                 else:
1890                     self.Subst["__STABLE_WARNING__"] = ""
1891                 mail_message = utils.TemplateSubst(self.Subst, template)
1892                 utils.send_mail(mail_message)
1893
1894                 # Clear up after ourselves
1895                 del self.Subst["__BUG_NUMBER__"]
1896                 del self.Subst["__STABLE_WARNING__"]
1897
1898         if action and self.logger:
1899             self.logger.log(["closing bugs"] + bugs)
1900
1901         summary += "\n"
1902
1903         return summary
1904
1905     ###########################################################################
1906
1907     def announce(self, short_summary, action):
1908         """
1909         Send an announce mail about a new upload.
1910
1911         @type short_summary: string
1912         @param short_summary: Short summary text to include in the mail
1913
1914         @type action: bool
1915         @param action: If set to false, no real action will be done.
1916
1917         @rtype: string
1918         @return: Text string describing the action taken.
1919
1920         """
1921
1922         cnf = Config()
1923         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1924
1925         # Only do announcements for source uploads with a recent dpkg-dev installed
1926         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1927            self.pkg.changes["architecture"].has_key("source"):
1928             return ""
1929
1930         lists_done = {}
1931         summary = ""
1932
1933         self.Subst["__SHORT_SUMMARY__"] = short_summary
1934
1935         for dist in self.pkg.changes["distribution"].keys():
1936             suite = get_suite(dist)
1937             if suite is None: continue
1938             announce_list = suite.announce
1939             if announce_list == "" or lists_done.has_key(announce_list):
1940                 continue
1941
1942             lists_done[announce_list] = 1
1943             summary += "Announcing to %s\n" % (announce_list)
1944
1945             if action:
1946                 self.update_subst()
1947                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1948                 if cnf.get("Dinstall::TrackingServer") and \
1949                    self.pkg.changes["architecture"].has_key("source"):
1950                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1951                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1952
1953                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1954                 utils.send_mail(mail_message)
1955
1956                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1957
1958         if cnf.FindB("Dinstall::CloseBugs"):
1959             summary = self.close_bugs(summary, action)
1960
1961         del self.Subst["__SHORT_SUMMARY__"]
1962
1963         return summary
1964
1965     ###########################################################################
1966     @session_wrapper
1967     def accept (self, summary, short_summary, session=None):
1968         """
1969         Accept an upload.
1970
1971         This moves all files referenced from the .changes into the pool,
1972         sends the accepted mail, announces to lists, closes bugs and
1973         also checks for override disparities. If enabled it will write out
1974         the version history for the BTS Version Tracking and will finally call
1975         L{queue_build}.
1976
1977         @type summary: string
1978         @param summary: Summary text
1979
1980         @type short_summary: string
1981         @param short_summary: Short summary
1982         """
1983
1984         cnf = Config()
1985         stats = SummaryStats()
1986
1987         print "Installing."
1988         self.logger.log(["installing changes", self.pkg.changes_file])
1989
1990         poolfiles = []
1991
1992         # Add the .dsc file to the DB first
1993         for newfile, entry in self.pkg.files.items():
1994             if entry["type"] == "dsc":
1995                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1996                 for j in pfs:
1997                     poolfiles.append(j)
1998
1999         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2000         for newfile, entry in self.pkg.files.items():
2001             if entry["type"] == "deb":
2002                 poolfiles.append(add_deb_to_db(self, newfile, session))
2003
2004         # If this is a sourceful diff only upload that is moving
2005         # cross-component we need to copy the .orig files into the new
2006         # component too for the same reasons as above.
2007         # XXX: mhy: I think this should be in add_dsc_to_db
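        # Illustrative: if foo moves from non-free to main, an orig
        # tarball recorded under pool/non-free/f/foo/ is copied to
        # pool/main/f/foo/ (paths assumed; utils.poolify below computes
        # the real destination).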
2008         if self.pkg.changes["architecture"].has_key("source"):
2009             for orig_file in self.pkg.orig_files.keys():
2010                 if not self.pkg.orig_files[orig_file].has_key("id"):
2011                     continue # Skip if it's not in the pool
2012                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2013                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2014                     continue # Skip if the location didn't change
2015
2016                 # Do the move
2017                 oldf = get_poolfile_by_id(orig_file_id, session)
2018                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2019                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2020                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2021
2022                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2023
2024                 # TODO: Care about size/md5sum collisions etc
2025                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2026
2027                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2028                 if newf is None:
2029                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2030                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2031
2032                     session.flush()
2033
2034                     # Don't reference the old file from this changes
2035                     for p in poolfiles:
2036                         if p.file_id == oldf.file_id:
2037                             poolfiles.remove(p)
2038
2039                     poolfiles.append(newf)
2040
2041                     # Fix up the DSC references
2042                     toremove = []
2043
2044                     for df in source.srcfiles:
2045                         if df.poolfile.file_id == oldf.file_id:
2046                             # Add a new DSC entry and mark the old one for deletion
2047                             # Don't do it in the loop so we don't change the thing we're iterating over
2048                             newdscf = DSCFile()
2049                             newdscf.source_id = source.source_id
2050                             newdscf.poolfile_id = newf.file_id
2051                             session.add(newdscf)
2052
2053                             toremove.append(df)
2054
2055                     for df in toremove:
2056                         session.delete(df)
2057
2058                     # Flush our changes
2059                     session.flush()
2060
2061                     # Make sure that our source object is up-to-date
2062                     session.expire(source)
2063
2064         # Add changelog information to the database
2065         self.store_changelog()
2066
2067         # Install the files into the pool
2068         for newfile, entry in self.pkg.files.items():
2069             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2070             utils.move(newfile, destination)
2071             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2072             stats.accept_bytes += float(entry["size"])
2073
2074         # Copy the .changes file across for suites which need it.
2075         copy_changes = dict([(x.copychanges, '')
2076                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2077                              if x.copychanges is not None])
2078
2079         for dest in copy_changes.keys():
2080             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2081
2082         # We're done - commit the database changes
2083         session.commit()
2084         # Our SQL session will automatically start a new transaction after
2085         # the last commit
2086
2087         # Move the .changes into the 'done' directory
2088         utils.move(self.pkg.changes_file,
2089                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2090
2091         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2092             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2093
2094         self.update_subst()
2095         self.Subst["__SUMMARY__"] = summary
2096         mail_message = utils.TemplateSubst(self.Subst,
2097                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2098         utils.send_mail(mail_message)
2099         self.announce(short_summary, 1)
2100
2101         ## Helper stuff for DebBugs Version Tracking
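        # Illustrative: for "foo_1.0-1_amd64.changes" the [:-8] slices
        # below strip the 8-character ".changes" suffix, yielding
        # companion "foo_1.0-1_amd64.versions" and
        # "foo_1.0-1_amd64.debinfo" files.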
2102         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2103             if self.pkg.changes["architecture"].has_key("source"):
2104                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2105                 version_history = os.fdopen(fd, 'w')
2106                 version_history.write(self.pkg.dsc["bts changelog"])
2107                 version_history.close()
2108                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2109                                       self.pkg.changes_file[:-8]+".versions")
2110                 os.rename(temp_filename, filename)
2111                 os.chmod(filename, 0644)
2112
2113             # Write out the binary -> source mapping.
2114             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2115             debinfo = os.fdopen(fd, 'w')
2116             for name, entry in sorted(self.pkg.files.items()):
2117                 if entry["type"] == "deb":
2118                     line = " ".join([entry["package"], entry["version"],
2119                                      entry["architecture"], entry["source package"],
2120                                      entry["source version"]])
2121                     debinfo.write(line+"\n")
2122             debinfo.close()
2123             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2124                                   self.pkg.changes_file[:-8]+".debinfo")
2125             os.rename(temp_filename, filename)
2126             os.chmod(filename, 0644)
2127
2128         session.commit()
2129
2130         # Set up our copy queues (e.g. buildd queues)
2131         for suite_name in self.pkg.changes["distribution"].keys():
2132             suite = get_suite(suite_name, session)
2133             for q in suite.copy_queues:
2134                 for f in poolfiles:
2135                     q.add_file_from_pool(f)
2136
2137         session.commit()
2138
2139         # Finally...
2140         stats.accept_count += 1
2141
2142     def check_override(self):
2143         """
2144         Checks override entries for validity. Mails "Override disparity" warnings,
2145         if that feature is enabled.
2146
2147         Abandons the check if
2148           - override disparity checks are disabled
2149           - mail sending is disabled
2150         """
2151
2152         cnf = Config()
2153
2154         # Abandon the check if override disparity checks have been disabled
2155         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2156             return
2157
2158         summary = self.pkg.check_override()
2159
2160         if summary == "":
2161             return
2162
2163         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2164
2165         self.update_subst()
2166         self.Subst["__SUMMARY__"] = summary
2167         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2168         utils.send_mail(mail_message)
2169         del self.Subst["__SUMMARY__"]
2170
2171     ###########################################################################
2172
2173     def remove(self, from_dir=None):
2174         """
2175         Used (for instance) in p-u to remove the package from unchecked
2176
2177         Also removes the package from holding area.
2178         """
2179         if from_dir is None:
2180             from_dir = self.pkg.directory
2181         h = Holding()
2182
2183         for f in self.pkg.files.keys():
2184             os.unlink(os.path.join(from_dir, f))
2185             if os.path.exists(os.path.join(h.holding_dir, f)):
2186                 os.unlink(os.path.join(h.holding_dir, f))
2187
2188         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2189         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2190             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2191
2192     ###########################################################################
2193
2194     def move_to_queue (self, queue):
2195         """
2196         Move files to a destination queue using the permissions in the table
2197         """
2198         h = Holding()
2199         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2200                    queue.path, perms=int(queue.change_perms, 8))
2201         for f in self.pkg.files.keys():
2202             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2203
2204     ###########################################################################
2205
2206     def force_reject(self, reject_files):
2207         """
2208         Forcefully move files from the current directory to the
2209         reject directory.  If any file already exists in the reject
2210         directory it will be moved to the morgue to make way for
2211         the new file.
2212
2213         @type reject_files: dict
2214         @param reject_files: file dictionary
2215
2216         """
2217
2218         cnf = Config()
2219
2220         for file_entry in reject_files:
2221             # Skip any files which don't exist or which we don't have permission to copy.
2222             if os.access(file_entry, os.R_OK) == 0:
2223                 continue
2224
2225             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2226
2227             try:
2228                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2229             except OSError, e:
2230                 # File exists?  Let's find a new name by adding a number
2231                 if e.errno == errno.EEXIST:
2232                     try:
2233                         dest_file = utils.find_next_free(dest_file, 255)
2234                     except NoFreeFilenameError:
2235                         # Something's either gone badly Pete Tong, or
2236                         # someone is trying to exploit us.
2237                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2238                         return
2239
2240                     # Make sure we really got it
2241                     try:
2242                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2243                     except OSError, e:
2244                         # Likewise
2245                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2246                         return
2247                 else:
2248                     raise
2249             # If we got here, we own the destination file, so we can
2250             # safely overwrite it.
2251             utils.move(file_entry, dest_file, 1, perms=0660)
2252             os.close(dest_fd)
2253
2254     ###########################################################################
2255     def do_reject (self, manual=0, reject_message="", notes=""):
2256         """
2257         Reject an upload. If C{manual} is true and no reject message was
2258         given, spawn an editor so the user can write one.
2259
2260         @type manual: bool
2261         @param manual: manual or automated rejection
2262
2263         @type reject_message: string
2264         @param reject_message: A reject message
2265
2266         @return: 0 on success; 1 if the rejection was abandoned.
2267
2268         """
2269         # If we weren't given a manual rejection message, spawn an
2270         # editor so the user can add one in...
2271         if manual and not reject_message:
2272             (fd, temp_filename) = utils.temp_filename()
2273             temp_file = os.fdopen(fd, 'w')
2274             if len(notes) > 0:
2275                 for note in notes:
2276                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2277                                     % (note.author, note.version, note.notedate, note.comment))
2278             temp_file.close()
2279             editor = os.environ.get("EDITOR","vi")
2280             answer = 'E'
2281             while answer == 'E':
2282                 os.system("%s %s" % (editor, temp_filename))
2283                 temp_fh = utils.open_file(temp_filename)
2284                 reject_message = "".join(temp_fh.readlines())
2285                 temp_fh.close()
2286                 print "Reject message:"
2287                 print utils.prefix_multi_line_string(reject_message, "  ", include_blank_lines=1)
2288                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2289                 answer = "XXX"
2290                 while prompt.find(answer) == -1:
2291                     answer = utils.our_raw_input(prompt)
2292                     m = re_default_answer.search(prompt)
2293                     if answer == "":
2294                         answer = m.group(1)
2295                     answer = answer[:1].upper()
2296             os.unlink(temp_filename)
2297             if answer == 'A':
2298                 return 1
2299             elif answer == 'Q':
2300                 sys.exit(0)
2301
2302         print "Rejecting.\n"
2303
2304         cnf = Config()
2305
2306         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2307         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2308
2309         # Move all the files into the reject directory
2310         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2311         self.force_reject(reject_files)
2312
2313         # If we fail here someone is probably trying to exploit the race
2314         # so let's just raise an exception ...
2315         if os.path.exists(reason_filename):
2316             os.unlink(reason_filename)
2317         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2318
2319         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2320
2321         self.update_subst()
2322         if not manual:
2323             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2324             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2325             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2326             os.write(reason_fd, reject_message)
2327             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2328         else:
2329             # Build up the rejection email
2330             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2331             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2332             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2333             self.Subst["__REJECT_MESSAGE__"] = ""
2334             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2335             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2336             # Write the rejection email out as the <foo>.reason file
2337             os.write(reason_fd, reject_mail_message)
2338
2339         del self.Subst["__REJECTOR_ADDRESS__"]
2340         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2341         del self.Subst["__CC__"]
2342
2343         os.close(reason_fd)
2344
2345         # Send the rejection mail
2346         utils.send_mail(reject_mail_message)
2347
2348         if self.logger:
2349             self.logger.log(["rejected", self.pkg.changes_file])
2350
2351         return 0
2352
2353     ################################################################################
2354     def in_override_p(self, package, component, suite, binary_type, filename, session):
2355         """
2356         Check if a package already has override entries in the DB
2357
2358         @type package: string
2359         @param package: package name
2360
2361         @type component: string
2362         @param component: component name
2363
2364         @type suite: string
2365         @param suite: suite name
2366
2367         @type binary_type: string
2368         @param binary_type: type of the package
2369
2370         @type filename: string
2371         @param filename: filename we check
2372
2373         @return: the database result. But no one cares anyway.
2374
2375         """
2376
2377         cnf = Config()
2378
2379         if binary_type == "": # must be source
2380             file_type = "dsc"
2381         else:
2382             file_type = binary_type
2383
2384         # Override suite name; used for example with proposed-updates
2385         oldsuite = get_suite(suite, session)
2386         if (not oldsuite is None) and oldsuite.overridesuite:
2387             suite = oldsuite.overridesuite
2388
2389         result = get_override(package, suite, component, file_type, session)
2390
2391         # If checking for a source package, fall back on the binary override type
2392         if file_type == "dsc" and len(result) < 1:
2393             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2394
2395         # Remember the section and priority so we can check them later if appropriate
2396         if len(result) > 0:
2397             result = result[0]
2398             self.pkg.files[filename]["override section"] = result.section.section
2399             self.pkg.files[filename]["override priority"] = result.priority.priority
2400             return result
2401
2402         return None
2403
2404     ################################################################################
2405     def get_anyversion(self, sv_list, suite):
2406         """
2407         @type sv_list: list
2408         @param sv_list: list of (suite, version) tuples to check
2409
2410         @type suite: string
2411         @param suite: suite name
2412
2413         Returns the highest version in C{sv_list} found in C{suite} or any suite it enhances (per Suite::<suite>::VersionChecks::Enhances).
2414         """
2415         Cnf = Config()
2416         anyversion = None
2417         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2418         for (s, v) in sv_list:
2419             if s in [ x.lower() for x in anysuite ]:
2420                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2421                     anyversion = v
2422
2423         return anyversion
2424
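    # Worked example for get_anyversion (data and config assumed): with
    # Suite::unstable::VersionChecks::Enhances listing "experimental",
    # sv_list = [("unstable", "1.0-1"), ("experimental", "1.1-1")] and
    # suite = "unstable" yield "1.1-1", the highest version seen across
    # the suite and everything it enhances.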
2425     ################################################################################
2426
2427     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2428         """
2429         @type sv_list: list
2430         @param sv_list: list of (suite, version) tuples to check
2431
2432         @type filename: string
2433         @param filename: filename of the package (used in reject messages)
2434
2435         @type new_version: string
2436         @param new_version: version of the uploaded package
2437
2438         Ensure versions are newer than existing packages in target
2439         suites and that cross-suite version checking rules as
2440         set out in the conf file are satisfied.
2441         """
2442
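        # Illustrative configuration (suite names assumed): with
        # Suite::testing-proposed-updates::VersionChecks::MustBeNewerThan
        # containing "stable" and MustBeOlderThan containing "unstable",
        # an upload targeted at testing-proposed-updates must be newer
        # than stable's version and older than unstable's, or it is
        # rejected / propagated by the logic below.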
2443         cnf = Config()
2444
2445         # Check versions for each target suite
2446         for target_suite in self.pkg.changes["distribution"].keys():
2447             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2448             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2449
2450             # Enforce "must be newer than target suite" even if conffile omits it
2451             if target_suite not in must_be_newer_than:
2452                 must_be_newer_than.append(target_suite)
2453
2454             for (suite, existent_version) in sv_list:
2455                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2456
2457                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2458                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2459
2460                 if suite in must_be_older_than and vercmp > -1:
2461                     cansave = 0
2462
2463                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2464                         # we really use the other suite, ignoring the conflicting one ...
2465                         addsuite = self.pkg.changes["distribution-version"][suite]
2466
2467                         add_version = self.get_anyversion(sv_list, addsuite)
2468                         target_version = self.get_anyversion(sv_list, target_suite)
2469
2470                         if not add_version:
2471                             # not add_version can only happen if we map to a suite
2472                             # that doesn't enhance the suite we're propup'ing from.
2473                             # so "propup-ver x a b c; map a d" is a problem only if
2474                             # d doesn't enhance a.
2475                             #
2476                             # i think we could always propagate in this case, rather
2477                             # than complaining. either way, this isn't a REJECT issue
2478                             #
2479                             # And - we really should complain to the dorks who configured dak
2480                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2481                             self.pkg.changes.setdefault("propdistribution", {})
2482                             self.pkg.changes["propdistribution"][addsuite] = 1
2483                             cansave = 1
2484                         elif not target_version:
2485                             # not target_version is true when the package is NEW
2486                             # we could just stick with the "...old version..." REJECT
2487                             # for this, I think.
2488                             self.rejects.append("Won't propagate NEW packages.")
2489                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2490                             # propagation would be redundant; no need to reject, though.
2491                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2492                             cansave = 1
2493                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2494                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2495                             # propagate!!
2496                             self.warnings.append("Propagating upload to %s" % (addsuite))
2497                             self.pkg.changes.setdefault("propdistribution", {})
2498                             self.pkg.changes["propdistribution"][addsuite] = 1
2499                             cansave = 1
2500
2501                     if not cansave:
2502                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2503
2504     ################################################################################
2505     def check_binary_against_db(self, filename, session):
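         """
         Version and overwrite checks for a binary upload.

         Appends to self.rejects if the version checks for any target suite
         are violated, or if the same package/version/architecture is
         already in the archive.
         """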
2506         # Ensure version is sane
2507         q = session.query(BinAssociation)
2508         q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2509         q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2510
2511         self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2512                                        filename, self.pkg.files[filename]["version"], sourceful=False)
2513
2514         # Check for any existing copies of the file
2515         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2516         q = q.filter_by(version=self.pkg.files[filename]["version"])
2517         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2518
2519         if q.count() > 0:
2520             self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2521
2522     ################################################################################
2523
2524     def check_source_against_db(self, filename, session):
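         """
         Cross-suite version check for a source upload, run against every
         version of this source package already known to the database.
         """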
2525         source = self.pkg.dsc.get("source")
2526         version = self.pkg.dsc.get("version")
2527
2528         # Ensure version is sane
2529         self.cross_suite_version_check(get_suite_version(source, session),
2530                                        filename, version, sourceful=True)
2531
2532     ################################################################################
2533     def check_dsc_against_db(self, filename, session):
2534         """
2535         Check each file listed in the .dsc against the archive pool and
             the queue directories, and verify its md5sum and size.

2536         @warning: NB: this function can remove entries from the 'files' index [if
2537          the orig tarball is a duplicate of the one in the archive]; if
2538          you're iterating over 'files' and call this function as part of
2539          the loop, be sure to add a check to the top of the loop to
2540          ensure you haven't just tried to dereference the deleted entry.
2541
2542         """
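         # A minimal sketch of the iteration pattern the warning asks for
         # (hypothetical caller; recheck() further down does exactly this):
         #
         #   for f in upload.pkg.files.keys():
         #       if not upload.pkg.files.has_key(f):
         #           continue    # entry deleted by an earlier iteration
         #       upload.check_dsc_against_db(f, session)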
2543
2544         Cnf = Config()
2545         self.pkg.orig_files = {} # XXX: do we need to clear it?
2546         orig_files = self.pkg.orig_files
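         # By the time we return, orig_files maps each orig filename to a
         # dict which may carry "path", "id" and "location" keys; roughly
         # (values illustrative only):
         #
         #   { "foo_1.0.orig.tar.gz":
         #       { "path": "/srv/.../pool/main/f/foo/foo_1.0.orig.tar.gz",
         #         "id": 1234, "location": 5 } }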
2547
2548         # Try to find all files mentioned in the .dsc.  This has
2549         # to work harder to cope with the multiple possible
2550         # locations of an .orig.tar.gz.
2551         # The ordering on the select is needed to pick the newest orig
2552         # when it exists in multiple places.
2553         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2554             found = None
2555             if self.pkg.files.has_key(dsc_name):
2556                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2557                 actual_size = int(self.pkg.files[dsc_name]["size"])
2558                 found = "%s in incoming" % (dsc_name)
2559
2560                 # Check the file does not already exist in the archive
2561                 ql = get_poolfile_like_name(dsc_name, session)
2562
2563                 # Strip out anything whose filename doesn't end with dsc_name.
2564                 # Build a new list rather than calling ql.remove() while
2565                 # iterating over ql, which would skip entries.
2566                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2567
2568                 # "[dak] has not broken them.  [dak] has fixed a
2569                 # brokenness.  Your crappy hack exploited a bug in
2570                 # the old dinstall."
2571                 #
2572                 # "(Come on!  I thought it was always obvious that
2573                 # one just doesn't release different files with
2574                 # the same name and version.)"
2575                 #                        -- ajk@ on d-devel@l.d.o
2576
2577                 if len(ql) > 0:
2578                     # Ignore exact matches for .orig.tar.gz
2579                     match = 0
2580                     if re_is_orig_source.match(dsc_name):
2581                         for i in ql:
2582                             if self.pkg.files.has_key(dsc_name) and \
2583                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2584                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2585                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2586                                 # TODO: Don't delete the entry, just mark it as not needed
2587                                 # This would fix the stupidity of changing something we often iterate over
2588                                 # whilst we're doing it
2589                                 del self.pkg.files[dsc_name]
2590                                 dsc_entry["files id"] = i.file_id
2591                                 if not orig_files.has_key(dsc_name):
2592                                     orig_files[dsc_name] = {}
2593                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2594                                 match = 1
2595
2596                                 # Don't bitch that we couldn't find this file later
2597                                 try:
2598                                     self.later_check_files.remove(dsc_name)
2599                                 except ValueError:
2600                                     pass
2601
2603                     if not match:
2604                         self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2605
2606             elif re_is_orig_source.match(dsc_name):
2607                 # Check in the pool
2608                 ql = get_poolfile_like_name(dsc_name, session)
2609
2610                 # Strip out anything whose filename doesn't end with dsc_name.
2611                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2612                 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2615
2616                 if len(ql) > 0:
2617                     # Unfortunately, we may get more than one match here if,
2618                     # for example, the package was in potato but had an -sa
2619                     # upload in woody.  So we need to choose the right one.
2620
2621                     # default to something sane in case we don't match any or have only one
2622                     x = ql[0]
2623
2624                     if len(ql) > 1:
2625                         for i in ql:
2626                             old_file = os.path.join(i.location.path, i.filename)
2627                             old_file_fh = utils.open_file(old_file)
2628                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2629                             old_file_fh.close()
2630                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2631                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2632                                 x = i
2633
                         # Use the chosen match x, not the leftover loop variable i
2634                     old_file = os.path.join(x.location.path, x.filename)
2635                     old_file_fh = utils.open_file(old_file)
2636                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2637                     old_file_fh.close()
2638                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2639                     found = old_file
2640                     suite_type = x.location.archive_type
2641                     # need this for updating dsc_files in install()
2642                     dsc_entry["files id"] = x.file_id
2643                     # See install() in process-accepted...
2644                     if not orig_files.has_key(dsc_name):
2645                         orig_files[dsc_name] = {}
2646                     orig_files[dsc_name]["id"] = x.file_id
2647                     orig_files[dsc_name]["path"] = old_file
2648                     orig_files[dsc_name]["location"] = x.location.location_id
2649                 else:
2650                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2651                     # Not there? Check the queue directories...
2652                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2653                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2654                             continue
2655                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2656                         if os.path.exists(in_otherdir):
2657                             in_otherdir_fh = utils.open_file(in_otherdir)
2658                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2659                             in_otherdir_fh.close()
2660                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2661                             found = in_otherdir
2662                             if not orig_files.has_key(dsc_name):
2663                                 orig_files[dsc_name] = {}
2664                             orig_files[dsc_name]["path"] = in_otherdir
2665
2666                     if not found:
2667                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2668                         continue
2669             else:
2670                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2671                 continue
2672             if actual_md5 != dsc_entry["md5sum"]:
2673                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2674             if actual_size != int(dsc_entry["size"]):
2675                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2676
2677     ################################################################################
2678     # This is used by process-new and process-holding to recheck a changes file
2679     # at the time we're running.  It mainly wraps various other internal functions
2680     # and is similar to accepted_checks - these should probably be tidied up
2681     # and combined
2682     def recheck(self, session):
2683         cnf = Config()
2684         for f in self.pkg.files.keys():
2685             # The .orig.tar.gz can disappear out from under us if it's a
2686             # duplicate of one in the archive.
2687             if not self.pkg.files.has_key(f):
2688                 continue
2689
2690             entry = self.pkg.files[f]
2691
2692             # Check that the source still exists
2693             if entry["type"] == "deb":
2694                 source_version = entry["source version"]
2695                 source_package = entry["source package"]
2696                 if not self.pkg.changes["architecture"].has_key("source") \
2697                    and not source_exists(source_package, source_version, \
2698                     suites = self.pkg.changes["distribution"].keys(), session = session):
2699                     source_epochless_version = re_no_epoch.sub('', source_version)
2700                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2701                     found = False
2702                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2703                         if cnf.has_key("Dir::Queue::%s" % (q)):
2704                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2705                                 found = True
2706                     if not found:
2707                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2708
2709             # Version and file overwrite checks
2710             if entry["type"] == "deb":
2711                 self.check_binary_against_db(f, session)
2712             elif entry["type"] == "dsc":
2713                 self.check_source_against_db(f, session)
2714                 self.check_dsc_against_db(f, session)
2715
2716     ################################################################################
2717     def accepted_checks(self, overwrite_checks, session):
2718         # Recheck anything that relies on the database, since that's not
2719         # frozen between accept and our run time when called from p-a.
2720
2721         # overwrite_checks is set to False when installing to stable/oldstable
2722
2723         propagate = {}
2724         nopropagate = {}
2725
2726         # Find the .dsc (again)
2727         dsc_filename = None
2728         for f in self.pkg.files.keys():
2729             if self.pkg.files[f]["type"] == "dsc":
2730                 dsc_filename = f
2731
2732         for checkfile in self.pkg.files.keys():
2733             # The .orig.tar.gz can disappear out from under us if it's a
2734             # duplicate of one in the archive.
2735             if not self.pkg.files.has_key(checkfile):
2736                 continue
2737
2738             entry = self.pkg.files[checkfile]
2739
2740             # Check that the source still exists
2741             if entry["type"] == "deb":
2742                 source_version = entry["source version"]
2743                 source_package = entry["source package"]
2744                 if not self.pkg.changes["architecture"].has_key("source") \
2745                    and not source_exists(source_package, source_version, \
2746                     suites = self.pkg.changes["distribution"].keys(), \
2747                     session = session):
2748                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2749
2750             # Version and file overwrite checks
2751             if overwrite_checks:
2752                 if entry["type"] == "deb":
2753                     self.check_binary_against_db(checkfile, session)
2754                 elif entry["type"] == "dsc":
2755                     self.check_source_against_db(checkfile, session)
2756                     self.check_dsc_against_db(dsc_filename, session)
2757
2758             # propagate when the package is in the override tables:
2759             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2760                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2761                     propagate[suite] = 1
2762                 else:
2763                     nopropagate[suite] = 1
2764
2765         for suite in propagate.keys():
2766             if suite in nopropagate:
2767                 continue
2768             self.pkg.changes["distribution"][suite] = 1
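         # i.e. one failed override check on any file (nopropagate) vetoes
         # propagation to that suite, no matter how many files passed.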
2769
2770         for checkfile in self.pkg.files.keys():
                 # Re-fetch the entry here; the value left over from the loop
                 # above would otherwise be stale.
                 entry = self.pkg.files[checkfile]
2771             # Check the package is still in the override tables
2772             for suite in self.pkg.changes["distribution"].keys():
2773                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2774                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2775
2776     ################################################################################
2777     # If any file of an upload has a recent mtime then chances are good
2778     # the file is still being uploaded.
2779
2780     def upload_too_new(self):
2781         cnf = Config()
2782         too_new = False
2783         # Move back to the original directory to get accurate time stamps
2784         cwd = os.getcwd()
2785         os.chdir(self.pkg.directory)
2786         file_list = self.pkg.files.keys()
2787         file_list.extend(self.pkg.dsc_files.keys())
2788         file_list.append(self.pkg.changes_file)
2789         for f in file_list:
2790             try:
2791                 last_modified = time.time() - os.path.getmtime(f)
2792                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2793                     too_new = True
2794                     break
2795             except OSError:
                     # If we can't stat the file, skip it for this check
2796                 pass
2797
2798         os.chdir(cwd)
2799         return too_new
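     # Dinstall::SkipTime is in seconds; a hypothetical configuration entry
     # might look like
     #
     #   Dinstall::SkipTime "300";
     #
     # in which case any file modified less than five minutes ago marks the
     # upload as too new.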
2800
2801     def store_changelog(self):
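         """
         Store the changelog text of this upload in the changelogs_text
         table and point the matching changes row at it.  Binary-only
         uploads are only recorded when the version looks like a bin-NMU.
         """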
2802
2803         # Skip binary-only upload if it is not a bin-NMU
2804         if not self.pkg.changes['architecture'].has_key('source'):
2805             from daklib.regexes import re_bin_only_nmu
2806             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2807                 return
2808
2809         session = DBConn().session()
2810
2811         # Check if upload already has a changelog entry
2812         query = """SELECT changelog_id FROM changes WHERE source = :source
2813                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2814         if session.execute(query, {'source': self.pkg.changes['source'], \
2815                                    'version': self.pkg.changes['version'], \
2816                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2817             session.commit()
2818             return
2819
2820         # Add current changelog text into changelogs_text table, return created ID
2821         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2822         changelog_id = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2823
2824         # Link ID to the upload available in changes table
2825         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2826                    AND version = :version AND architecture = :architecture"""
2827         session.execute(query, {'id': changelog_id, 'source': self.pkg.changes['source'], \
2828                                 'version': self.pkg.changes['version'], \
2829                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2830
2831         session.commit()
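     # In SQL terms the effect of store_changelog() is roughly (table layout
     # inferred from the queries above, not from a schema dump):
     #
     #   changelogs_text(id, changelog)   -- one row per stored changelog
     #   changes(..., changelog_id)       -- references changelogs_text.id
     #
     # so a repeated source/version/architecture upload takes the early
     # return above and no duplicate text is inserted.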