]> git.decadent.org.uk Git - dak.git/blob - daklib/queue.py
24cda1e50fd2684fc7828dfc3a59c88df44dbb7c
[dak.git] / daklib / queue.py
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
58
59 ###############################################################################
60
61 def get_type(f, session):
62     """
63     Get the file type of C{f}
64
65     @type f: dict
66     @param f: file entry from Changes object
67
68     @type session: SQLA Session
69     @param session: SQL Alchemy session object
70
71     @rtype: string
72     @return: filetype
73
74     """
75     # Determine the type
76     if f.has_key("dbtype"):
77         file_type = f["dbtype"]
78     elif re_source_ext.match(f["type"]):
79         file_type = "dsc"
80     else:
81         file_type = f["type"]
82         utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))
83
84     # Validate the override type
85     type_id = get_override_type(file_type, session)
86     if type_id is None:
87         utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))
88
89     return file_type
90
91 ################################################################################
92
93 # Determine what parts in a .changes are NEW
94
95 def determine_new(filename, changes, files, warn=1, session = None):
96     """
97     Determine what parts in a C{changes} file are NEW.
98
99     @type filename: str
100     @param filename: changes filename
101
102     @type changes: Upload.Pkg.changes dict
103     @param changes: Changes dictionary
104
105     @type files: Upload.Pkg.files dict
106     @param files: Files dictionary
107
108     @type warn: bool
109     @param warn: Warn if overrides are added for (old)stable
110
111     @rtype: dict
112     @return: dictionary of NEW components.
113
114     """
115     # TODO: This should all use the database instead of parsing the changes
116     # file again
117     new = {}
118     byhand = {}
119
120     dbchg = get_dbchange(filename, session)
121     if dbchg is None:
122         print "Warning: cannot find changes file in database; won't check byhand"
123
124     # Build up a list of potentially new things
125     for name, f in files.items():
126         # Keep a record of byhand elements
127         if f["section"] == "byhand":
128             byhand[name] = 1
129             continue
130
131         pkg = f["package"]
132         priority = f["priority"]
133         section = f["section"]
134         file_type = get_type(f, session)
135         component = f["component"]
136
137         if file_type == "dsc":
138             priority = "source"
139
140         if not new.has_key(pkg):
141             new[pkg] = {}
142             new[pkg]["priority"] = priority
143             new[pkg]["section"] = section
144             new[pkg]["type"] = file_type
145             new[pkg]["component"] = component
146             new[pkg]["files"] = []
147         else:
148             old_type = new[pkg]["type"]
149             if old_type != file_type:
150                 # source gets trumped by deb or udeb
151                 if old_type == "dsc":
152                     new[pkg]["priority"] = priority
153                     new[pkg]["section"] = section
154                     new[pkg]["type"] = file_type
155                     new[pkg]["component"] = component
156
157         new[pkg]["files"].append(name)
158
159         if f.has_key("othercomponents"):
160             new[pkg]["othercomponents"] = f["othercomponents"]
161
162     # Fix up the list of target suites
163     cnf = Config()
164     for suite in changes["suite"].keys():
165         oldsuite = get_suite(suite, session)
166         if not oldsuite:
167             print "WARNING: Invalid suite %s found" % suite
168             continue
169
170         if oldsuite.overridesuite:
171             newsuite = get_suite(oldsuite.overridesuite, session)
172
173             if newsuite:
174                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
175                     oldsuite.overridesuite, suite)
176                 del changes["suite"][suite]
177                 changes["suite"][oldsuite.overridesuite] = 1
178             else:
179                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
180                     oldsuite.overridesuite, suite)
181
182     # Check for unprocessed byhand files
183     if dbchg is not None:
184         for b in byhand.keys():
185             # Find the file entry in the database
186             found = False
187             for f in dbchg.files:
188                 if f.filename == b:
189                     found = True
190                     # If it's processed, we can ignore it
191                     if f.processed:
192                         del byhand[b]
193                     break
194
195             if not found:
196                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
197
198     # Check for new stuff
199     for suite in changes["suite"].keys():
200         for pkg in new.keys():
201             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
202             if len(ql) > 0:
203                 for file_entry in new[pkg]["files"]:
204                     if files[file_entry].has_key("new"):
205                         del files[file_entry]["new"]
206                 del new[pkg]
207
208     if warn:
209         for s in ['stable', 'oldstable']:
210             if changes["suite"].has_key(s):
211                 print "WARNING: overrides will be added for %s!" % s
212         for pkg in new.keys():
213             if new[pkg].has_key("othercomponents"):
214                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
215
216     return new, byhand
217
218 ################################################################################
219
220 def check_valid(new, session = None):
221     """
222     Check if section and priority for NEW packages exist in database.
223     Additionally does sanity checks:
224       - debian-installer packages have to be udeb (or source)
225       - non debian-installer packages can not be udeb
226       - source priority can only be assigned to dsc file types
227
228     @type new: dict
229     @param new: Dict of new packages with their section, priority and type.
230
231     """
232     for pkg in new.keys():
233         section_name = new[pkg]["section"]
234         priority_name = new[pkg]["priority"]
235         file_type = new[pkg]["type"]
236
237         section = get_section(section_name, session)
238         if section is None:
239             new[pkg]["section id"] = -1
240         else:
241             new[pkg]["section id"] = section.section_id
242
243         priority = get_priority(priority_name, session)
244         if priority is None:
245             new[pkg]["priority id"] = -1
246         else:
247             new[pkg]["priority id"] = priority.priority_id
248
249         # Sanity checks
250         di = section_name.find("debian-installer") != -1
251
252         # If d-i, we must be udeb and vice-versa
253         if     (di and file_type not in ("udeb", "dsc")) or \
254            (not di and file_type == "udeb"):
255             new[pkg]["section id"] = -1
256
257         # If dsc we need to be source and vice-versa
258         if (priority == "source" and file_type != "dsc") or \
259            (priority != "source" and file_type == "dsc"):
260             new[pkg]["priority id"] = -1
261
262 ###############################################################################
263
264 # Used by Upload.check_timestamps
265 class TarTime(object):
266     def __init__(self, future_cutoff, past_cutoff):
267         self.reset()
268         self.future_cutoff = future_cutoff
269         self.past_cutoff = past_cutoff
270
271     def reset(self):
272         self.future_files = {}
273         self.ancient_files = {}
274
275     def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
276         if MTime > self.future_cutoff:
277             self.future_files[Name] = MTime
278         if MTime < self.past_cutoff:
279             self.ancient_files[Name] = MTime
280
281 ###############################################################################
282
283 def prod_maintainer(notes, upload):
284     cnf = Config()
285
286     # Here we prepare an editor and get them ready to prod...
287     (fd, temp_filename) = utils.temp_filename()
288     temp_file = os.fdopen(fd, 'w')
289     for note in notes:
290         temp_file.write(note.comment)
291     temp_file.close()
292     editor = os.environ.get("EDITOR","vi")
293     answer = 'E'
294     while answer == 'E':
295         os.system("%s %s" % (editor, temp_filename))
296         temp_fh = utils.open_file(temp_filename)
297         prod_message = "".join(temp_fh.readlines())
298         temp_fh.close()
299         print "Prod message:"
300         print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
301         prompt = "[P]rod, Edit, Abandon, Quit ?"
302         answer = "XXX"
303         while prompt.find(answer) == -1:
304             answer = utils.our_raw_input(prompt)
305             m = re_default_answer.search(prompt)
306             if answer == "":
307                 answer = m.group(1)
308             answer = answer[:1].upper()
309     os.unlink(temp_filename)
310     if answer == 'A':
311         return
312     elif answer == 'Q':
313         end()
314         sys.exit(0)
315     # Otherwise, do the proding...
316     user_email_address = utils.whoami() + " <%s>" % (
317         cnf["Dinstall::MyAdminAddress"])
318
319     Subst = upload.Subst
320
321     Subst["__FROM_ADDRESS__"] = user_email_address
322     Subst["__PROD_MESSAGE__"] = prod_message
323     Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
324
325     prod_mail_message = utils.TemplateSubst(
326         Subst,cnf["Dir::Templates"]+"/process-new.prod")
327
328     # Send the prod mail
329     utils.send_mail(prod_mail_message)
330
331     print "Sent prodding message"
332
333 ################################################################################
334
335 def edit_note(note, upload, session, trainee=False):
336     # Write the current data to a temporary file
337     (fd, temp_filename) = utils.temp_filename()
338     editor = os.environ.get("EDITOR","vi")
339     answer = 'E'
340     while answer == 'E':
341         os.system("%s %s" % (editor, temp_filename))
342         temp_file = utils.open_file(temp_filename)
343         newnote = temp_file.read().rstrip()
344         temp_file.close()
345         print "New Note:"
346         print utils.prefix_multi_line_string(newnote,"  ")
347         prompt = "[D]one, Edit, Abandon, Quit ?"
348         answer = "XXX"
349         while prompt.find(answer) == -1:
350             answer = utils.our_raw_input(prompt)
351             m = re_default_answer.search(prompt)
352             if answer == "":
353                 answer = m.group(1)
354             answer = answer[:1].upper()
355     os.unlink(temp_filename)
356     if answer == 'A':
357         return
358     elif answer == 'Q':
359         end()
360         sys.exit(0)
361
362     comment = NewComment()
363     comment.package = upload.pkg.changes["source"]
364     comment.version = upload.pkg.changes["version"]
365     comment.comment = newnote
366     comment.author  = utils.whoami()
367     comment.trainee = trainee
368     session.add(comment)
369     session.commit()
370
371 ###############################################################################
372
373 # suite names DMs can upload to
374 dm_suites = ['unstable', 'experimental']
375
376 def get_newest_source(source, session):
377     'returns the newest DBSource object in dm_suites'
378     ## the most recent version of the package uploaded to unstable or
379     ## experimental includes the field "DM-Upload-Allowed: yes" in the source
380     ## section of its control file
381     q = session.query(DBSource).filter_by(source = source). \
382         filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
383         order_by(desc('source.version'))
384     return q.first()
385
386 def get_suite_version_by_source(source, session):
387     'returns a list of tuples (suite_name, version) for source package'
388     q = session.query(Suite.suite_name, DBSource.version). \
389         join(Suite.sources).filter_by(source = source)
390     return q.all()
391
392 def get_source_by_package_and_suite(package, suite_name, session):
393     '''
394     returns a DBSource query filtered by DBBinary.package and this package's
395     suite_name
396     '''
397     return session.query(DBSource). \
398         join(DBSource.binaries).filter_by(package = package). \
399         join(DBBinary.suites).filter_by(suite_name = suite_name)
400
401 def get_suite_version_by_package(package, arch_string, session):
402     '''
403     returns a list of tuples (suite_name, version) for binary package and
404     arch_string
405     '''
406     return session.query(Suite.suite_name, DBBinary.version). \
407         join(Suite.binaries).filter_by(package = package). \
408         join(DBBinary.architecture). \
409         filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
410
411 class Upload(object):
412     """
413     Everything that has to do with an upload processed.
414
415     """
416     def __init__(self):
417         self.logger = None
418         self.pkg = Changes()
419         self.reset()
420
421     ###########################################################################
422
423     def reset (self):
424         """ Reset a number of internal variables."""
425
426         # Initialize the substitution template map
427         cnf = Config()
428         self.Subst = {}
429         self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
430         self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
431         self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
432         self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
433
434         self.rejects = []
435         self.warnings = []
436         self.notes = []
437
438         self.later_check_files = []
439
440         self.pkg.reset()
441
442     def package_info(self):
443         """
444         Format various messages from this Upload to send to the maintainer.
445         """
446
447         msgs = (
448             ('Reject Reasons', self.rejects),
449             ('Warnings', self.warnings),
450             ('Notes', self.notes),
451         )
452
453         msg = ''
454         for title, messages in msgs:
455             if messages:
456                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
457         msg += '\n\n'
458
459         return msg
460
461     ###########################################################################
462     def update_subst(self):
463         """ Set up the per-package template substitution mappings """
464
465         cnf = Config()
466
467         # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
468         if not self.pkg.changes.has_key("architecture") or not \
469            isinstance(self.pkg.changes["architecture"], dict):
470             self.pkg.changes["architecture"] = { "Unknown" : "" }
471
472         # and maintainer2047 may not exist.
473         if not self.pkg.changes.has_key("maintainer2047"):
474             self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
475
476         self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
477         self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
478         self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
479
480         # For source uploads the Changed-By field wins; otherwise Maintainer wins.
481         if self.pkg.changes["architecture"].has_key("source") and \
482            self.pkg.changes["changedby822"] != "" and \
483            (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
484
485             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
486             self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
487             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
488         else:
489             self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
490             self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
491             self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
492
493         # Process policy doesn't set the fingerprint field and I don't want to make it
494         # do it for now as I don't want to have to deal with the case where we accepted
495         # the package into PU-NEW, but the fingerprint has gone away from the keyring in
496         # the meantime so the package will be remarked as rejectable.  Urgh.
497         # TODO: Fix this properly
498         if self.pkg.changes.has_key('fingerprint'):
499             session = DBConn().session()
500             fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
501             if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
502                 if self.pkg.changes.has_key("sponsoremail"):
503                     self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
504             session.close()
505
506         if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
507             self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
508
509         # Apply any global override of the Maintainer field
510         if cnf.get("Dinstall::OverrideMaintainer"):
511             self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
512             self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
513
514         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
515         self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
516         self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
517         self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
518
519     ###########################################################################
520     def load_changes(self, filename):
521         """
522         Load a changes file and setup a dictionary around it. Also checks for mandantory
523         fields  within.
524
525         @type filename: string
526         @param filename: Changes filename, full path.
527
528         @rtype: boolean
529         @return: whether the changes file was valid or not.  We may want to
530                  reject even if this is True (see what gets put in self.rejects).
531                  This is simply to prevent us even trying things later which will
532                  fail because we couldn't properly parse the file.
533         """
534         Cnf = Config()
535         self.pkg.changes_file = filename
536
537         # Parse the .changes field into a dictionary
538         try:
539             self.pkg.changes.update(parse_changes(filename))
540         except CantOpenError:
541             self.rejects.append("%s: can't read file." % (filename))
542             return False
543         except ParseChangesError, line:
544             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
545             return False
546         except ChangesUnicodeError:
547             self.rejects.append("%s: changes file not proper utf-8" % (filename))
548             return False
549
550         # Parse the Files field from the .changes into another dictionary
551         try:
552             self.pkg.files.update(utils.build_file_list(self.pkg.changes))
553         except ParseChangesError, line:
554             self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
555             return False
556         except UnknownFormatError, format:
557             self.rejects.append("%s: unknown format '%s'." % (filename, format))
558             return False
559
560         # Check for mandatory fields
561         for i in ("distribution", "source", "binary", "architecture",
562                   "version", "maintainer", "files", "changes", "description"):
563             if not self.pkg.changes.has_key(i):
564                 # Avoid undefined errors later
565                 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
566                 return False
567
568         # Strip a source version in brackets from the source field
569         if re_strip_srcver.search(self.pkg.changes["source"]):
570             self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
571
572         # Ensure the source field is a valid package name.
573         if not re_valid_pkg_name.match(self.pkg.changes["source"]):
574             self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
575
576         # Split multi-value fields into a lower-level dictionary
577         for i in ("architecture", "distribution", "binary", "closes"):
578             o = self.pkg.changes.get(i, "")
579             if o != "":
580                 del self.pkg.changes[i]
581
582             self.pkg.changes[i] = {}
583
584             for j in o.split():
585                 self.pkg.changes[i][j] = 1
586
587         # Fix the Maintainer: field to be RFC822/2047 compatible
588         try:
589             (self.pkg.changes["maintainer822"],
590              self.pkg.changes["maintainer2047"],
591              self.pkg.changes["maintainername"],
592              self.pkg.changes["maintaineremail"]) = \
593                    fix_maintainer (self.pkg.changes["maintainer"])
594         except ParseMaintError, msg:
595             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
596                    % (filename, self.pkg.changes["maintainer"], msg))
597
598         # ...likewise for the Changed-By: field if it exists.
599         try:
600             (self.pkg.changes["changedby822"],
601              self.pkg.changes["changedby2047"],
602              self.pkg.changes["changedbyname"],
603              self.pkg.changes["changedbyemail"]) = \
604                    fix_maintainer (self.pkg.changes.get("changed-by", ""))
605         except ParseMaintError, msg:
606             self.pkg.changes["changedby822"] = ""
607             self.pkg.changes["changedby2047"] = ""
608             self.pkg.changes["changedbyname"] = ""
609             self.pkg.changes["changedbyemail"] = ""
610
611             self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
612                    % (filename, self.pkg.changes["changed-by"], msg))
613
614         # Ensure all the values in Closes: are numbers
615         if self.pkg.changes.has_key("closes"):
616             for i in self.pkg.changes["closes"].keys():
617                 if re_isanum.match (i) == None:
618                     self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
619
620         # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
621         self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
622         self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
623
624         # Check the .changes is non-empty
625         if not self.pkg.files:
626             self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
627             return False
628
629         # Changes was syntactically valid even if we'll reject
630         return True
631
632     ###########################################################################
633
634     def check_distributions(self):
635         "Check and map the Distribution field"
636
637         Cnf = Config()
638
639         # Handle suite mappings
640         for m in Cnf.ValueList("SuiteMappings"):
641             args = m.split()
642             mtype = args[0]
643             if mtype == "map" or mtype == "silent-map":
644                 (source, dest) = args[1:3]
645                 if self.pkg.changes["distribution"].has_key(source):
646                     del self.pkg.changes["distribution"][source]
647                     self.pkg.changes["distribution"][dest] = 1
648                     if mtype != "silent-map":
649                         self.notes.append("Mapping %s to %s." % (source, dest))
650                 if self.pkg.changes.has_key("distribution-version"):
651                     if self.pkg.changes["distribution-version"].has_key(source):
652                         self.pkg.changes["distribution-version"][source]=dest
653             elif mtype == "map-unreleased":
654                 (source, dest) = args[1:3]
655                 if self.pkg.changes["distribution"].has_key(source):
656                     for arch in self.pkg.changes["architecture"].keys():
657                         if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
658                             self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
659                             del self.pkg.changes["distribution"][source]
660                             self.pkg.changes["distribution"][dest] = 1
661                             break
662             elif mtype == "ignore":
663                 suite = args[1]
664                 if self.pkg.changes["distribution"].has_key(suite):
665                     del self.pkg.changes["distribution"][suite]
666                     self.warnings.append("Ignoring %s as a target suite." % (suite))
667             elif mtype == "reject":
668                 suite = args[1]
669                 if self.pkg.changes["distribution"].has_key(suite):
670                     self.rejects.append("Uploads to %s are not accepted." % (suite))
671             elif mtype == "propup-version":
672                 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
673                 #
674                 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
675                 if self.pkg.changes["distribution"].has_key(args[1]):
676                     self.pkg.changes.setdefault("distribution-version", {})
677                     for suite in args[2:]:
678                         self.pkg.changes["distribution-version"][suite] = suite
679
680         # Ensure there is (still) a target distribution
681         if len(self.pkg.changes["distribution"].keys()) < 1:
682             self.rejects.append("No valid distribution remaining.")
683
684         # Ensure target distributions exist
685         for suite in self.pkg.changes["distribution"].keys():
686             if not Cnf.has_key("Suite::%s" % (suite)):
687                 self.rejects.append("Unknown distribution `%s'." % (suite))
688
689     ###########################################################################
690
691     def binary_file_checks(self, f, session):
692         cnf = Config()
693         entry = self.pkg.files[f]
694
695         # Extract package control information
696         deb_file = utils.open_file(f)
697         try:
698             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
699         except:
700             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
701             deb_file.close()
702             # Can't continue, none of the checks on control would work.
703             return
704
705         # Check for mandantory "Description:"
706         deb_file.seek(0)
707         try:
708             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
709         except:
710             self.rejects.append("%s: Missing Description in binary package" % (f))
711             return
712
713         deb_file.close()
714
715         # Check for mandatory fields
716         for field in [ "Package", "Architecture", "Version" ]:
717             if control.Find(field) == None:
718                 # Can't continue
719                 self.rejects.append("%s: No %s field in control." % (f, field))
720                 return
721
722         # Ensure the package name matches the one give in the .changes
723         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
724             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
725
726         # Validate the package field
727         package = control.Find("Package")
728         if not re_valid_pkg_name.match(package):
729             self.rejects.append("%s: invalid package name '%s'." % (f, package))
730
731         # Validate the version field
732         version = control.Find("Version")
733         if not re_valid_version.match(version):
734             self.rejects.append("%s: invalid version number '%s'." % (f, version))
735
736         # Ensure the architecture of the .deb is one we know about.
737         default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
738         architecture = control.Find("Architecture")
739         upload_suite = self.pkg.changes["distribution"].keys()[0]
740
741         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
742             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
743             self.rejects.append("Unknown architecture '%s'." % (architecture))
744
745         # Ensure the architecture of the .deb is one of the ones
746         # listed in the .changes.
747         if not self.pkg.changes["architecture"].has_key(architecture):
748             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
749
750         # Sanity-check the Depends field
751         depends = control.Find("Depends")
752         if depends == '':
753             self.rejects.append("%s: Depends field is empty." % (f))
754
755         # Sanity-check the Provides field
756         provides = control.Find("Provides")
757         if provides:
758             provide = re_spacestrip.sub('', provides)
759             if provide == '':
760                 self.rejects.append("%s: Provides field is empty." % (f))
761             prov_list = provide.split(",")
762             for prov in prov_list:
763                 if not re_valid_pkg_name.match(prov):
764                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
765
766         # Check the section & priority match those given in the .changes (non-fatal)
767         if     control.Find("Section") and entry["section"] != "" \
768            and entry["section"] != control.Find("Section"):
769             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
770                                 (f, control.Find("Section", ""), entry["section"]))
771         if control.Find("Priority") and entry["priority"] != "" \
772            and entry["priority"] != control.Find("Priority"):
773             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
774                                 (f, control.Find("Priority", ""), entry["priority"]))
775
776         entry["package"] = package
777         entry["architecture"] = architecture
778         entry["version"] = version
779         entry["maintainer"] = control.Find("Maintainer", "")
780
781         if f.endswith(".udeb"):
782             self.pkg.files[f]["dbtype"] = "udeb"
783         elif f.endswith(".deb"):
784             self.pkg.files[f]["dbtype"] = "deb"
785         else:
786             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
787
788         entry["source"] = control.Find("Source", entry["package"])
789
790         # Get the source version
791         source = entry["source"]
792         source_version = ""
793
794         if source.find("(") != -1:
795             m = re_extract_src_version.match(source)
796             source = m.group(1)
797             source_version = m.group(2)
798
799         if not source_version:
800             source_version = self.pkg.files[f]["version"]
801
802         entry["source package"] = source
803         entry["source version"] = source_version
804
805         # Ensure the filename matches the contents of the .deb
806         m = re_isadeb.match(f)
807
808         #  package name
809         file_package = m.group(1)
810         if entry["package"] != file_package:
811             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
812                                 (f, file_package, entry["dbtype"], entry["package"]))
813         epochless_version = re_no_epoch.sub('', control.Find("Version"))
814
815         #  version
816         file_version = m.group(2)
817         if epochless_version != file_version:
818             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
819                                 (f, file_version, entry["dbtype"], epochless_version))
820
821         #  architecture
822         file_architecture = m.group(3)
823         if entry["architecture"] != file_architecture:
824             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
825                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
826
827         # Check for existent source
828         source_version = entry["source version"]
829         source_package = entry["source package"]
830         if self.pkg.changes["architecture"].has_key("source"):
831             if source_version != self.pkg.changes["version"]:
832                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
833                                     (source_version, f, self.pkg.changes["version"]))
834         else:
835             # Check in the SQL database
836             if not source_exists(source_package, source_version, suites = \
837                 self.pkg.changes["distribution"].keys(), session = session):
838                 # Check in one of the other directories
839                 source_epochless_version = re_no_epoch.sub('', source_version)
840                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
841                 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
842                     entry["byhand"] = 1
843                 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
844                     entry["new"] = 1
845                 else:
846                     dsc_file_exists = False
847                     for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
848                         if cnf.has_key("Dir::Queue::%s" % (myq)):
849                             if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
850                                 dsc_file_exists = True
851                                 break
852
853                     if not dsc_file_exists:
854                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
855
856         # Check the version and for file overwrites
857         self.check_binary_against_db(f, session)
858
859         # Temporarily disable contents generation until we change the table storage layout
860         #b = Binary(f)
861         #b.scan_package()
862         #if len(b.rejects) > 0:
863         #    for j in b.rejects:
864         #        self.rejects.append(j)
865
866     def source_file_checks(self, f, session):
867         entry = self.pkg.files[f]
868
869         m = re_issource.match(f)
870         if not m:
871             return
872
873         entry["package"] = m.group(1)
874         entry["version"] = m.group(2)
875         entry["type"] = m.group(3)
876
877         # Ensure the source package name matches the Source filed in the .changes
878         if self.pkg.changes["source"] != entry["package"]:
879             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
880
881         # Ensure the source version matches the version in the .changes file
882         if re_is_orig_source.match(f):
883             changes_version = self.pkg.changes["chopversion2"]
884         else:
885             changes_version = self.pkg.changes["chopversion"]
886
887         if changes_version != entry["version"]:
888             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
889
890         # Ensure the .changes lists source in the Architecture field
891         if not self.pkg.changes["architecture"].has_key("source"):
892             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
893
894         # Check the signature of a .dsc file
895         if entry["type"] == "dsc":
896             # check_signature returns either:
897             #  (None, [list, of, rejects]) or (signature, [])
898             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
899             for j in rejects:
900                 self.rejects.append(j)
901
902         entry["architecture"] = "source"
903
904     def per_suite_file_checks(self, f, suite, session):
905         cnf = Config()
906         entry = self.pkg.files[f]
907
908         # Skip byhand
909         if entry.has_key("byhand"):
910             return
911
912         # Check we have fields we need to do these checks
913         oktogo = True
914         for m in ['component', 'package', 'priority', 'size', 'md5sum']:
915             if not entry.has_key(m):
916                 self.rejects.append("file '%s' does not have field %s set" % (f, m))
917                 oktogo = False
918
919         if not oktogo:
920             return
921
922         # Handle component mappings
923         for m in cnf.ValueList("ComponentMappings"):
924             (source, dest) = m.split()
925             if entry["component"] == source:
926                 entry["original component"] = source
927                 entry["component"] = dest
928
929         # Ensure the component is valid for the target suite
930         if cnf.has_key("Suite:%s::Components" % (suite)) and \
931            entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
932             self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
933             return
934
935         # Validate the component
936         if not get_component(entry["component"], session):
937             self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
938             return
939
940         # See if the package is NEW
941         if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
942             entry["new"] = 1
943
944         # Validate the priority
945         if entry["priority"].find('/') != -1:
946             self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
947
948         # Determine the location
949         location = cnf["Dir::Pool"]
950         l = get_location(location, entry["component"], session=session)
951         if l is None:
952             self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
953             entry["location id"] = -1
954         else:
955             entry["location id"] = l.location_id
956
957         # Check the md5sum & size against existing files (if any)
958         entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
959
960         found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
961                                          entry["size"], entry["md5sum"], entry["location id"])
962
963         if found is None:
964             self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
965         elif found is False and poolfile is not None:
966             self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
967         else:
968             if poolfile is None:
969                 entry["files id"] = None
970             else:
971                 entry["files id"] = poolfile.file_id
972
973         # Check for packages that have moved from one component to another
974         entry['suite'] = suite
975         res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
976         if res.rowcount > 0:
977             entry["othercomponents"] = res.fetchone()[0]
978
979     def check_files(self, action=True):
980         file_keys = self.pkg.files.keys()
981         holding = Holding()
982         cnf = Config()
983
984         if action:
985             cwd = os.getcwd()
986             os.chdir(self.pkg.directory)
987             for f in file_keys:
988                 ret = holding.copy_to_holding(f)
989                 if ret is not None:
990                     self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
991
992             os.chdir(cwd)
993
994         # check we already know the changes file
995         # [NB: this check must be done post-suite mapping]
996         base_filename = os.path.basename(self.pkg.changes_file)
997
998         session = DBConn().session()
999
1000         try:
1001             dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1002             # if in the pool or in a queue other than unchecked, reject
1003             if (dbc.in_queue is None) \
1004                    or (dbc.in_queue is not None
1005                        and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1006                 self.rejects.append("%s file already known to dak" % base_filename)
1007         except NoResultFound, e:
1008             # not known, good
1009             pass
1010
1011         has_binaries = False
1012         has_source = False
1013
1014         for f, entry in self.pkg.files.items():
1015             # Ensure the file does not already exist in one of the accepted directories
1016             for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1017                 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1018                 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1019                     self.rejects.append("%s file already exists in the %s directory." % (f, d))
1020
1021             if not re_taint_free.match(f):
1022                 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1023
1024             # Check the file is readable
1025             if os.access(f, os.R_OK) == 0:
1026                 # When running in -n, copy_to_holding() won't have
1027                 # generated the reject_message, so we need to.
1028                 if action:
1029                     if os.path.exists(f):
1030                         self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1031                     else:
1032                         # Don't directly reject, mark to check later to deal with orig's
1033                         # we can find in the pool
1034                         self.later_check_files.append(f)
1035                 entry["type"] = "unreadable"
1036                 continue
1037
1038             # If it's byhand skip remaining checks
1039             if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1040                 entry["byhand"] = 1
1041                 entry["type"] = "byhand"
1042
1043             # Checks for a binary package...
1044             elif re_isadeb.match(f):
1045                 has_binaries = True
1046                 entry["type"] = "deb"
1047
1048                 # This routine appends to self.rejects/warnings as appropriate
1049                 self.binary_file_checks(f, session)
1050
1051             # Checks for a source package...
1052             elif re_issource.match(f):
1053                 has_source = True
1054
1055                 # This routine appends to self.rejects/warnings as appropriate
1056                 self.source_file_checks(f, session)
1057
1058             # Not a binary or source package?  Assume byhand...
1059             else:
1060                 entry["byhand"] = 1
1061                 entry["type"] = "byhand"
1062
1063             # Per-suite file checks
1064             entry["oldfiles"] = {}
1065             for suite in self.pkg.changes["distribution"].keys():
1066                 self.per_suite_file_checks(f, suite, session)
1067
1068         session.close()
1069
1070         # If the .changes file says it has source, it must have source.
1071         if self.pkg.changes["architecture"].has_key("source"):
1072             if not has_source:
1073                 self.rejects.append("no source found and Architecture line in changes mention source.")
1074
1075             if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1076                 self.rejects.append("source only uploads are not supported.")
1077
1078     ###########################################################################
1079     def check_dsc(self, action=True, session=None):
1080         """Returns bool indicating whether or not the source changes are valid"""
1081         # Ensure there is source to check
1082         if not self.pkg.changes["architecture"].has_key("source"):
1083             return True
1084
1085         # Find the .dsc
1086         dsc_filename = None
1087         for f, entry in self.pkg.files.items():
1088             if entry["type"] == "dsc":
1089                 if dsc_filename:
1090                     self.rejects.append("can not process a .changes file with multiple .dsc's.")
1091                     return False
1092                 else:
1093                     dsc_filename = f
1094
1095         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1096         if not dsc_filename:
1097             self.rejects.append("source uploads must contain a dsc file")
1098             return False
1099
1100         # Parse the .dsc file
1101         try:
1102             self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
1103         except CantOpenError:
1104             # if not -n copy_to_holding() will have done this for us...
1105             if not action:
1106                 self.rejects.append("%s: can't read file." % (dsc_filename))
1107         except ParseChangesError, line:
1108             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1109         except InvalidDscError, line:
1110             self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
1111         except ChangesUnicodeError:
1112             self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
1113
1114         # Build up the file list of files mentioned by the .dsc
1115         try:
1116             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1117         except NoFilesFieldError:
1118             self.rejects.append("%s: no Files: field." % (dsc_filename))
1119             return False
1120         except UnknownFormatError, format:
1121             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1122             return False
1123         except ParseChangesError, line:
1124             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1125             return False
1126
1127         # Enforce mandatory fields
1128         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1129             if not self.pkg.dsc.has_key(i):
1130                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1131                 return False
1132
1133         # Validate the source and version fields
1134         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1135             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1136         if not re_valid_version.match(self.pkg.dsc["version"]):
1137             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1138
1139         # Only a limited list of source formats are allowed in each suite
1140         for dist in self.pkg.changes["distribution"].keys():
1141             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1142             if self.pkg.dsc["format"] not in allowed:
1143                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1144
1145         # Validate the Maintainer field
1146         try:
1147             # We ignore the return value
1148             fix_maintainer(self.pkg.dsc["maintainer"])
1149         except ParseMaintError, msg:
1150             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1151                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1152
1153         # Validate the build-depends field(s)
1154         for field_name in [ "build-depends", "build-depends-indep" ]:
1155             field = self.pkg.dsc.get(field_name)
1156             if field:
1157                 # Have apt try to parse them...
1158                 try:
1159                     apt_pkg.ParseSrcDepends(field)
1160                 except:
1161                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1162
1163         # Ensure the version number in the .dsc matches the version number in the .changes
1164         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1165         changes_version = self.pkg.files[dsc_filename]["version"]
1166
1167         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1168             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1169
1170         # Ensure the Files field contain only what's expected
1171         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1172
1173         # Ensure source is newer than existing source in target suites
1174         session = DBConn().session()
1175         self.check_source_against_db(dsc_filename, session)
1176         self.check_dsc_against_db(dsc_filename, session)
1177
1178         dbchg = get_dbchange(self.pkg.changes_file, session)
1179
1180         # Finally, check if we're missing any files
1181         for f in self.later_check_files:
1182             print 'XXX: %s' % f
1183             # Check if we've already processed this file if we have a dbchg object
1184             ok = False
1185             if dbchg:
1186                 for pf in dbchg.files:
1187                     if pf.filename == f and pf.processed:
1188                         self.notes.append('%s was already processed so we can go ahead' % f)
1189                         ok = True
1190                         del self.pkg.files[f]
1191             if not ok:
1192                 self.rejects.append("Could not find file %s references in changes" % f)
1193
1194         session.close()
1195
1196         return True
1197
1198     ###########################################################################
1199
1200     def get_changelog_versions(self, source_dir):
1201         """Extracts a the source package and (optionally) grabs the
1202         version history out of debian/changelog for the BTS."""
1203
1204         cnf = Config()
1205
1206         # Find the .dsc (again)
1207         dsc_filename = None
1208         for f in self.pkg.files.keys():
1209             if self.pkg.files[f]["type"] == "dsc":
1210                 dsc_filename = f
1211
1212         # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1213         if not dsc_filename:
1214             return
1215
1216         # Create a symlink mirror of the source files in our temporary directory
1217         for f in self.pkg.files.keys():
1218             m = re_issource.match(f)
1219             if m:
1220                 src = os.path.join(source_dir, f)
1221                 # If a file is missing for whatever reason, give up.
1222                 if not os.path.exists(src):
1223                     return
1224                 ftype = m.group(3)
1225                 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1226                    self.pkg.orig_files[f].has_key("path"):
1227                     continue
1228                 dest = os.path.join(os.getcwd(), f)
1229                 os.symlink(src, dest)
1230
1231         # If the orig files are not a part of the upload, create symlinks to the
1232         # existing copies.
1233         for orig_file in self.pkg.orig_files.keys():
1234             if not self.pkg.orig_files[orig_file].has_key("path"):
1235                 continue
1236             dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1237             os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1238
1239         # Extract the source
1240         cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1241         (result, output) = commands.getstatusoutput(cmd)
1242         if (result != 0):
1243             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1244             self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1245             return
1246
1247         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1248             return
1249
1250         # Get the upstream version
1251         upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1252         if re_strip_revision.search(upstr_version):
1253             upstr_version = re_strip_revision.sub('', upstr_version)
1254
1255         # Ensure the changelog file exists
1256         changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1257         if not os.path.exists(changelog_filename):
1258             self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1259             return
1260
1261         # Parse the changelog
1262         self.pkg.dsc["bts changelog"] = ""
1263         changelog_file = utils.open_file(changelog_filename)
1264         for line in changelog_file.readlines():
1265             m = re_changelog_versions.match(line)
1266             if m:
1267                 self.pkg.dsc["bts changelog"] += line
1268         changelog_file.close()
1269
1270         # Check we found at least one revision in the changelog
1271         if not self.pkg.dsc["bts changelog"]:
1272             self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1273
1274     def check_source(self):
1275         # Bail out if:
1276         #    a) there's no source
1277         if not self.pkg.changes["architecture"].has_key("source"):
1278             return
1279
1280         tmpdir = utils.temp_dirname()
1281
1282         # Move into the temporary directory
1283         cwd = os.getcwd()
1284         os.chdir(tmpdir)
1285
1286         # Get the changelog version history
1287         self.get_changelog_versions(cwd)
1288
1289         # Move back and cleanup the temporary tree
1290         os.chdir(cwd)
1291
1292         try:
1293             shutil.rmtree(tmpdir)
1294         except OSError, e:
1295             if e.errno != errno.EACCES:
1296                 print "foobar"
1297                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1298
1299             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1300             # We probably have u-r or u-w directories so chmod everything
1301             # and try again.
1302             cmd = "chmod -R u+rwx %s" % (tmpdir)
1303             result = os.system(cmd)
1304             if result != 0:
1305                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1306             shutil.rmtree(tmpdir)
1307         except Exception, e:
1308             print "foobar2 (%s)" % e
1309             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1310
1311     ###########################################################################
1312     def ensure_hashes(self):
1313         # Make sure we recognise the format of the Files: field in the .changes
1314         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1315         if len(format) == 2:
1316             format = int(format[0]), int(format[1])
1317         else:
1318             format = int(float(format[0])), 0
1319
1320         # We need to deal with the original changes blob, as the fields we need
1321         # might not be in the changes dict serialised into the .dak anymore.
1322         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1323
1324         # Copy the checksums over to the current changes dict.  This will keep
1325         # the existing modifications to it intact.
1326         for field in orig_changes:
1327             if field.startswith('checksums-'):
1328                 self.pkg.changes[field] = orig_changes[field]
1329
1330         # Check for unsupported hashes
1331         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1332             self.rejects.append(j)
1333
1334         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1335             self.rejects.append(j)
1336
1337         # We have to calculate the hash if we have an earlier changes version than
1338         # the hash appears in rather than require it exist in the changes file
1339         for hashname, hashfunc, version in utils.known_hashes:
1340             # TODO: Move _ensure_changes_hash into this class
1341             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1342                 self.rejects.append(j)
1343             if "source" in self.pkg.changes["architecture"]:
1344                 # TODO: Move _ensure_dsc_hash into this class
1345                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1346                     self.rejects.append(j)
1347
1348     def check_hashes(self):
1349         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1350             self.rejects.append(m)
1351
1352         for m in utils.check_size(".changes", self.pkg.files):
1353             self.rejects.append(m)
1354
1355         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1356             self.rejects.append(m)
1357
1358         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1359             self.rejects.append(m)
1360
1361         self.ensure_hashes()
1362
1363     ###########################################################################
1364
1365     def ensure_orig(self, target_dir='.', session=None):
1366         """
1367         Ensures that all orig files mentioned in the changes file are present
1368         in target_dir. If they do not exist, they are symlinked into place.
1369
1370         An list containing the symlinks that were created are returned (so they
1371         can be removed).
1372         """
1373
1374         symlinked = []
1375         cnf = Config()
1376
1377         for filename, entry in self.pkg.dsc_files.iteritems():
1378             if not re_is_orig_source.match(filename):
1379                 # File is not an orig; ignore
1380                 continue
1381
1382             if os.path.exists(filename):
1383                 # File exists, no need to continue
1384                 continue
1385
1386             def symlink_if_valid(path):
1387                 f = utils.open_file(path)
1388                 md5sum = apt_pkg.md5sum(f)
1389                 f.close()
1390
1391                 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1392                 expected = (int(entry['size']), entry['md5sum'])
1393
1394                 if fingerprint != expected:
1395                     return False
1396
1397                 dest = os.path.join(target_dir, filename)
1398
1399                 os.symlink(path, dest)
1400                 symlinked.append(dest)
1401
1402                 return True
1403
1404             session_ = session
1405             if session is None:
1406                 session_ = DBConn().session()
1407
1408             found = False
1409
1410             # Look in the pool
1411             for poolfile in get_poolfile_like_name('%s' % filename, session_):
1412                 poolfile_path = os.path.join(
1413                     poolfile.location.path, poolfile.filename
1414                 )
1415
1416                 if symlink_if_valid(poolfile_path):
1417                     found = True
1418                     break
1419
1420             if session is None:
1421                 session_.close()
1422
1423             if found:
1424                 continue
1425
1426             # Look in some other queues for the file
1427             queues = ('New', 'Byhand', 'ProposedUpdates',
1428                 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1429
1430             for queue in queues:
1431                 if not cnf.get('Dir::Queue::%s' % queue):
1432                     continue
1433
1434                 queuefile_path = os.path.join(
1435                     cnf['Dir::Queue::%s' % queue], filename
1436                 )
1437
1438                 if not os.path.exists(queuefile_path):
1439                     # Does not exist in this queue
1440                     continue
1441
1442                 if symlink_if_valid(queuefile_path):
1443                     break
1444
1445         return symlinked
1446
1447     ###########################################################################
1448
1449     def check_lintian(self):
1450         """
1451         Extends self.rejects by checking the output of lintian against tags
1452         specified in Dinstall::LintianTags.
1453         """
1454
1455         cnf = Config()
1456
1457         # Don't reject binary uploads
1458         if not self.pkg.changes['architecture'].has_key('source'):
1459             return
1460
1461         # Only check some distributions
1462         for dist in ('unstable', 'experimental'):
1463             if dist in self.pkg.changes['distribution']:
1464                 break
1465         else:
1466             return
1467
1468         # If we do not have a tagfile, don't do anything
1469         tagfile = cnf.get("Dinstall::LintianTags")
1470         if tagfile is None:
1471             return
1472
1473         # Parse the yaml file
1474         sourcefile = file(tagfile, 'r')
1475         sourcecontent = sourcefile.read()
1476         sourcefile.close()
1477
1478         try:
1479             lintiantags = yaml.load(sourcecontent)['lintian']
1480         except yaml.YAMLError, msg:
1481             utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1482             return
1483
1484         # Try and find all orig mentioned in the .dsc
1485         symlinked = self.ensure_orig()
1486
1487         # Setup the input file for lintian
1488         fd, temp_filename = utils.temp_filename()
1489         temptagfile = os.fdopen(fd, 'w')
1490         for tags in lintiantags.values():
1491             temptagfile.writelines(['%s\n' % x for x in tags])
1492         temptagfile.close()
1493
1494         try:
1495             cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1496                 (temp_filename, self.pkg.changes_file)
1497
1498             result, output = commands.getstatusoutput(cmd)
1499         finally:
1500             # Remove our tempfile and any symlinks we created
1501             os.unlink(temp_filename)
1502
1503             for symlink in symlinked:
1504                 os.unlink(symlink)
1505
1506         if result == 2:
1507             utils.warn("lintian failed for %s [return code: %s]." % \
1508                 (self.pkg.changes_file, result))
1509             utils.warn(utils.prefix_multi_line_string(output, \
1510                 " [possible output:] "))
1511
1512         def log(*txt):
1513             if self.logger:
1514                 self.logger.log(
1515                     [self.pkg.changes_file, "check_lintian"] + list(txt)
1516                 )
1517
1518         # Generate messages
1519         parsed_tags = parse_lintian_output(output)
1520         self.rejects.extend(
1521             generate_reject_messages(parsed_tags, lintiantags, log=log)
1522         )
1523
1524     ###########################################################################
1525     def check_urgency(self):
1526         cnf = Config()
1527         if self.pkg.changes["architecture"].has_key("source"):
1528             if not self.pkg.changes.has_key("urgency"):
1529                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1530             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1531             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1532                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1533                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1534                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1535
1536     ###########################################################################
1537
1538     # Sanity check the time stamps of files inside debs.
1539     # [Files in the near future cause ugly warnings and extreme time
1540     #  travel can cause errors on extraction]
1541
1542     def check_timestamps(self):
1543         Cnf = Config()
1544
1545         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1546         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1547         tar = TarTime(future_cutoff, past_cutoff)
1548
1549         for filename, entry in self.pkg.files.items():
1550             if entry["type"] == "deb":
1551                 tar.reset()
1552                 try:
1553                     deb_file = utils.open_file(filename)
1554                     apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1555                     deb_file.seek(0)
1556                     try:
1557                         apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1558                     except SystemError, e:
1559                         # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1560                         if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1561                             raise
1562                         deb_file.seek(0)
1563                         apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1564
1565                     deb_file.close()
1566
1567                     future_files = tar.future_files.keys()
1568                     if future_files:
1569                         num_future_files = len(future_files)
1570                         future_file = future_files[0]
1571                         future_date = tar.future_files[future_file]
1572                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1573                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1574
1575                     ancient_files = tar.ancient_files.keys()
1576                     if ancient_files:
1577                         num_ancient_files = len(ancient_files)
1578                         ancient_file = ancient_files[0]
1579                         ancient_date = tar.ancient_files[ancient_file]
1580                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1581                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1582                 except:
1583                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1584
1585     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1586         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1587             sponsored = False
1588         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1589             sponsored = False
1590             if uid_name == "":
1591                 sponsored = True
1592         else:
1593             sponsored = True
1594             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1595                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1596                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1597                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1598                         self.pkg.changes["sponsoremail"] = uid_email
1599
1600         return sponsored
1601
1602
1603     ###########################################################################
1604     # check_signed_by_key checks
1605     ###########################################################################
1606
1607     def check_signed_by_key(self):
1608         """Ensure the .changes is signed by an authorized uploader."""
1609         session = DBConn().session()
1610
1611         # First of all we check that the person has proper upload permissions
1612         # and that this upload isn't blocked
1613         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1614
1615         if fpr is None:
1616             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1617             return
1618
1619         # TODO: Check that import-keyring adds UIDs properly
1620         if not fpr.uid:
1621             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1622             return
1623
1624         # Check that the fingerprint which uploaded has permission to do so
1625         self.check_upload_permissions(fpr, session)
1626
1627         # Check that this package is not in a transition
1628         self.check_transition(session)
1629
1630         session.close()
1631
1632
1633     def check_upload_permissions(self, fpr, session):
1634         # Check any one-off upload blocks
1635         self.check_upload_blocks(fpr, session)
1636
1637         # Start with DM as a special case
1638         # DM is a special case unfortunately, so we check it first
1639         # (keys with no source access get more access than DMs in one
1640         #  way; DMs can only upload for their packages whether source
1641         #  or binary, whereas keys with no access might be able to
1642         #  upload some binaries)
1643         if fpr.source_acl.access_level == 'dm':
1644             self.check_dm_upload(fpr, session)
1645         else:
1646             # Check source-based permissions for other types
1647             if self.pkg.changes["architecture"].has_key("source") and \
1648                 fpr.source_acl.access_level is None:
1649                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1650                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1651                 self.rejects.append(rej)
1652                 return
1653             # If not a DM, we allow full upload rights
1654             uid_email = "%s@debian.org" % (fpr.uid.uid)
1655             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1656
1657
1658         # Check binary upload permissions
1659         # By this point we know that DMs can't have got here unless they
1660         # are allowed to deal with the package concerned so just apply
1661         # normal checks
1662         if fpr.binary_acl.access_level == 'full':
1663             return
1664
1665         # Otherwise we're in the map case
1666         tmparches = self.pkg.changes["architecture"].copy()
1667         tmparches.pop('source', None)
1668
1669         for bam in fpr.binary_acl_map:
1670             tmparches.pop(bam.architecture.arch_string, None)
1671
1672         if len(tmparches.keys()) > 0:
1673             if fpr.binary_reject:
1674                 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1675                 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1676                 self.rejects.append(rej)
1677             else:
1678                 # TODO: This is where we'll implement reject vs throw away binaries later
1679                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1680                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1681                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1682                 self.rejects.append(rej)
1683
1684
1685     def check_upload_blocks(self, fpr, session):
1686         """Check whether any upload blocks apply to this source, source
1687            version, uid / fpr combination"""
1688
1689         def block_rej_template(fb):
1690             rej = 'Manual upload block in place for package %s' % fb.source
1691             if fb.version is not None:
1692                 rej += ', version %s' % fb.version
1693             return rej
1694
1695         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1696             # version is None if the block applies to all versions
1697             if fb.version is None or fb.version == self.pkg.changes['version']:
1698                 # Check both fpr and uid - either is enough to cause a reject
1699                 if fb.fpr is not None:
1700                     if fb.fpr.fingerprint == fpr.fingerprint:
1701                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1702                 if fb.uid is not None:
1703                     if fb.uid == fpr.uid:
1704                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1705
1706
1707     def check_dm_upload(self, fpr, session):
1708         # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1709         ## none of the uploaded packages are NEW
1710         rej = False
1711         for f in self.pkg.files.keys():
1712             if self.pkg.files[f].has_key("byhand"):
1713                 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1714                 rej = True
1715             if self.pkg.files[f].has_key("new"):
1716                 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1717                 rej = True
1718
1719         if rej:
1720             return
1721
1722         r = get_newest_source(self.pkg.changes["source"], session)
1723
1724         if r is None:
1725             rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1726             self.rejects.append(rej)
1727             return
1728
1729         if not r.dm_upload_allowed:
1730             rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1731             self.rejects.append(rej)
1732             return
1733
1734         ## the Maintainer: field of the uploaded .changes file corresponds with
1735         ## the owner of the key used (ie, non-developer maintainers may not sponsor
1736         ## uploads)
1737         if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1738             self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1739
1740         ## the most recent version of the package uploaded to unstable or
1741         ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1742         ## non-developer maintainers cannot NMU or hijack packages)
1743
1744         # srcuploaders includes the maintainer
1745         accept = False
1746         for sup in r.srcuploaders:
1747             (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1748             # Eww - I hope we never have two people with the same name in Debian
1749             if email == fpr.uid.uid or name == fpr.uid.name:
1750                 accept = True
1751                 break
1752
1753         if not accept:
1754             self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1755             return
1756
1757         ## none of the packages are being taken over from other source packages
1758         for b in self.pkg.changes["binary"].keys():
1759             for suite in self.pkg.changes["distribution"].keys():
1760                 for s in get_source_by_package_and_suite(b, suite, session):
1761                     if s.source != self.pkg.changes["source"]:
1762                         self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1763
1764
1765
1766     def check_transition(self, session):
1767         cnf = Config()
1768
1769         sourcepkg = self.pkg.changes["source"]
1770
1771         # No sourceful upload -> no need to do anything else, direct return
1772         # We also work with unstable uploads, not experimental or those going to some
1773         # proposed-updates queue
1774         if "source" not in self.pkg.changes["architecture"] or \
1775            "unstable" not in self.pkg.changes["distribution"]:
1776             return
1777
1778         # Also only check if there is a file defined (and existant) with
1779         # checks.
1780         transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1781         if transpath == "" or not os.path.exists(transpath):
1782             return
1783
1784         # Parse the yaml file
1785         sourcefile = file(transpath, 'r')
1786         sourcecontent = sourcefile.read()
1787         try:
1788             transitions = yaml.load(sourcecontent)
1789         except yaml.YAMLError, msg:
1790             # This shouldn't happen, there is a wrapper to edit the file which
1791             # checks it, but we prefer to be safe than ending up rejecting
1792             # everything.
1793             utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1794             return
1795
1796         # Now look through all defined transitions
1797         for trans in transitions:
1798             t = transitions[trans]
1799             source = t["source"]
1800             expected = t["new"]
1801
1802             # Will be None if nothing is in testing.
1803             current = get_source_in_suite(source, "testing", session)
1804             if current is not None:
1805                 compare = apt_pkg.VersionCompare(current.version, expected)
1806
1807             if current is None or compare < 0:
1808                 # This is still valid, the current version in testing is older than
1809                 # the new version we wait for, or there is none in testing yet
1810
1811                 # Check if the source we look at is affected by this.
1812                 if sourcepkg in t['packages']:
1813                     # The source is affected, lets reject it.
1814
1815                     rejectmsg = "%s: part of the %s transition.\n\n" % (
1816                         sourcepkg, trans)
1817
1818                     if current is not None:
1819                         currentlymsg = "at version %s" % (current.version)
1820                     else:
1821                         currentlymsg = "not present in testing"
1822
1823                     rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1824
1825                     rejectmsg += "\n".join(textwrap.wrap("""Your package
1826 is part of a testing transition designed to get %s migrated (it is
1827 currently %s, we need version %s).  This transition is managed by the
1828 Release Team, and %s is the Release-Team member responsible for it.
1829 Please mail debian-release@lists.debian.org or contact %s directly if you
1830 need further assistance.  You might want to upload to experimental until this
1831 transition is done."""
1832                             % (source, currentlymsg, expected,t["rm"], t["rm"])))
1833
1834                     self.rejects.append(rejectmsg)
1835                     return
1836
1837     ###########################################################################
1838     # End check_signed_by_key checks
1839     ###########################################################################
1840
1841     def build_summaries(self):
1842         """ Build a summary of changes the upload introduces. """
1843
1844         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1845
1846         short_summary = summary
1847
1848         # This is for direport's benefit...
1849         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1850
1851         if byhand or new:
1852             summary += "Changes: " + f
1853
1854         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1855
1856         summary += self.announce(short_summary, 0)
1857
1858         return (summary, short_summary)
1859
1860     ###########################################################################
1861
1862     def close_bugs(self, summary, action):
1863         """
1864         Send mail to close bugs as instructed by the closes field in the changes file.
1865         Also add a line to summary if any work was done.
1866
1867         @type summary: string
1868         @param summary: summary text, as given by L{build_summaries}
1869
1870         @type action: bool
1871         @param action: Set to false no real action will be done.
1872
1873         @rtype: string
1874         @return: summary. If action was taken, extended by the list of closed bugs.
1875
1876         """
1877
1878         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1879
1880         bugs = self.pkg.changes["closes"].keys()
1881
1882         if not bugs:
1883             return summary
1884
1885         bugs.sort()
1886         summary += "Closing bugs: "
1887         for bug in bugs:
1888             summary += "%s " % (bug)
1889             if action:
1890                 self.update_subst()
1891                 self.Subst["__BUG_NUMBER__"] = bug
1892                 if self.pkg.changes["distribution"].has_key("stable"):
1893                     self.Subst["__STABLE_WARNING__"] = """
1894 Note that this package is not part of the released stable Debian
1895 distribution.  It may have dependencies on other unreleased software,
1896 or other instabilities.  Please take care if you wish to install it.
1897 The update will eventually make its way into the next released Debian
1898 distribution."""
1899                 else:
1900                     self.Subst["__STABLE_WARNING__"] = ""
1901                 mail_message = utils.TemplateSubst(self.Subst, template)
1902                 utils.send_mail(mail_message)
1903
1904                 # Clear up after ourselves
1905                 del self.Subst["__BUG_NUMBER__"]
1906                 del self.Subst["__STABLE_WARNING__"]
1907
1908         if action and self.logger:
1909             self.logger.log(["closing bugs"] + bugs)
1910
1911         summary += "\n"
1912
1913         return summary
1914
1915     ###########################################################################
1916
1917     def announce(self, short_summary, action):
1918         """
1919         Send an announce mail about a new upload.
1920
1921         @type short_summary: string
1922         @param short_summary: Short summary text to include in the mail
1923
1924         @type action: bool
1925         @param action: Set to false no real action will be done.
1926
1927         @rtype: string
1928         @return: Textstring about action taken.
1929
1930         """
1931
1932         cnf = Config()
1933         announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1934
1935         # Only do announcements for source uploads with a recent dpkg-dev installed
1936         if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1937            self.pkg.changes["architecture"].has_key("source"):
1938             return ""
1939
1940         lists_done = {}
1941         summary = ""
1942
1943         self.Subst["__SHORT_SUMMARY__"] = short_summary
1944
1945         for dist in self.pkg.changes["distribution"].keys():
1946             suite = get_suite(dist)
1947             if suite is None: continue
1948             announce_list = suite.announce
1949             if announce_list == "" or lists_done.has_key(announce_list):
1950                 continue
1951
1952             lists_done[announce_list] = 1
1953             summary += "Announcing to %s\n" % (announce_list)
1954
1955             if action:
1956                 self.update_subst()
1957                 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1958                 if cnf.get("Dinstall::TrackingServer") and \
1959                    self.pkg.changes["architecture"].has_key("source"):
1960                     trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1961                     self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1962
1963                 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1964                 utils.send_mail(mail_message)
1965
1966                 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1967
1968         if cnf.FindB("Dinstall::CloseBugs"):
1969             summary = self.close_bugs(summary, action)
1970
1971         del self.Subst["__SHORT_SUMMARY__"]
1972
1973         return summary
1974
1975     ###########################################################################
1976     @session_wrapper
1977     def accept (self, summary, short_summary, session=None):
1978         """
1979         Accept an upload.
1980
1981         This moves all files referenced from the .changes into the pool,
1982         sends the accepted mail, announces to lists, closes bugs and
1983         also checks for override disparities. If enabled it will write out
1984         the version history for the BTS Version Tracking and will finally call
1985         L{queue_build}.
1986
1987         @type summary: string
1988         @param summary: Summary text
1989
1990         @type short_summary: string
1991         @param short_summary: Short summary
1992         """
1993
1994         cnf = Config()
1995         stats = SummaryStats()
1996
1997         print "Installing."
1998         self.logger.log(["installing changes", self.pkg.changes_file])
1999
2000         poolfiles = []
2001
2002         # Add the .dsc file to the DB first
2003         for newfile, entry in self.pkg.files.items():
2004             if entry["type"] == "dsc":
2005                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2006                 for j in pfs:
2007                     poolfiles.append(j)
2008
2009         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2010         for newfile, entry in self.pkg.files.items():
2011             if entry["type"] == "deb":
2012                 poolfiles.append(add_deb_to_db(self, newfile, session))
2013
2014         # If this is a sourceful diff only upload that is moving
2015         # cross-component we need to copy the .orig files into the new
2016         # component too for the same reasons as above.
2017         # XXX: mhy: I think this should be in add_dsc_to_db
2018         if self.pkg.changes["architecture"].has_key("source"):
2019             for orig_file in self.pkg.orig_files.keys():
2020                 if not self.pkg.orig_files[orig_file].has_key("id"):
2021                     continue # Skip if it's not in the pool
2022                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2023                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2024                     continue # Skip if the location didn't change
2025
2026                 # Do the move
2027                 oldf = get_poolfile_by_id(orig_file_id, session)
2028                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2029                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2030                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2031
2032                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2033
2034                 # TODO: Care about size/md5sum collisions etc
2035                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2036
2037                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2038                 if newf is None:
2039                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2040                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2041
2042                     session.flush()
2043
2044                     # Don't reference the old file from this changes
2045                     for p in poolfiles:
2046                         if p.file_id == oldf.file_id:
2047                             poolfiles.remove(p)
2048
2049                     poolfiles.append(newf)
2050
2051                     # Fix up the DSC references
2052                     toremove = []
2053
2054                     for df in source.srcfiles:
2055                         if df.poolfile.file_id == oldf.file_id:
2056                             # Add a new DSC entry and mark the old one for deletion
2057                             # Don't do it in the loop so we don't change the thing we're iterating over
2058                             newdscf = DSCFile()
2059                             newdscf.source_id = source.source_id
2060                             newdscf.poolfile_id = newf.file_id
2061                             session.add(newdscf)
2062
2063                             toremove.append(df)
2064
2065                     for df in toremove:
2066                         session.delete(df)
2067
2068                     # Flush our changes
2069                     session.flush()
2070
2071                     # Make sure that our source object is up-to-date
2072                     session.expire(source)
2073
2074         # Add changelog information to the database
2075         self.store_changelog()
2076
2077         # Install the files into the pool
2078         for newfile, entry in self.pkg.files.items():
2079             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2080             utils.move(newfile, destination)
2081             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2082             stats.accept_bytes += float(entry["size"])
2083
2084         # Copy the .changes file across for suite which need it.
2085         copy_changes = dict([(x.copychanges, '')
2086                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2087                              if x.copychanges is not None])
2088
2089         for dest in copy_changes.keys():
2090             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2091
2092         # We're done - commit the database changes
2093         session.commit()
2094         # Our SQL session will automatically start a new transaction after
2095         # the last commit
2096
2097         # Move the .changes into the 'done' directory
2098         utils.move(self.pkg.changes_file,
2099                    os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
2100
2101         if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2102             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2103
2104         self.update_subst()
2105         self.Subst["__SUMMARY__"] = summary
2106         mail_message = utils.TemplateSubst(self.Subst,
2107                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2108         utils.send_mail(mail_message)
2109         self.announce(short_summary, 1)
2110
2111         ## Helper stuff for DebBugs Version Tracking
2112         if cnf.Find("Dir::Queue::BTSVersionTrack"):
2113             if self.pkg.changes["architecture"].has_key("source"):
2114                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2115                 version_history = os.fdopen(fd, 'w')
2116                 version_history.write(self.pkg.dsc["bts changelog"])
2117                 version_history.close()
2118                 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2119                                       self.pkg.changes_file[:-8]+".versions")
2120                 os.rename(temp_filename, filename)
2121                 os.chmod(filename, 0644)
2122
2123             # Write out the binary -> source mapping.
2124             (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2125             debinfo = os.fdopen(fd, 'w')
2126             for name, entry in sorted(self.pkg.files.items()):
2127                 if entry["type"] == "deb":
2128                     line = " ".join([entry["package"], entry["version"],
2129                                      entry["architecture"], entry["source package"],
2130                                      entry["source version"]])
2131                     debinfo.write(line+"\n")
2132             debinfo.close()
2133             filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2134                                   self.pkg.changes_file[:-8]+".debinfo")
2135             os.rename(temp_filename, filename)
2136             os.chmod(filename, 0644)
2137
2138         session.commit()
2139
2140         # Set up our copy queues (e.g. buildd queues)
2141         for suite_name in self.pkg.changes["distribution"].keys():
2142             suite = get_suite(suite_name, session)
2143             for q in suite.copy_queues:
2144                 for f in poolfiles:
2145                     q.add_file_from_pool(f)
2146
2147         session.commit()
2148
2149         # Finally...
2150         stats.accept_count += 1
2151
2152     def check_override(self):
2153         """
2154         Checks override entries for validity. Mails "Override disparity" warnings,
2155         if that feature is enabled.
2156
2157         Abandons the check if
2158           - override disparity checks are disabled
2159           - mail sending is disabled
2160         """
2161
2162         cnf = Config()
2163
2164         # Abandon the check if override disparity checks have been disabled
2165         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2166             return
2167
2168         summary = self.pkg.check_override()
2169
2170         if summary == "":
2171             return
2172
2173         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2174
2175         self.update_subst()
2176         self.Subst["__SUMMARY__"] = summary
2177         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2178         utils.send_mail(mail_message)
2179         del self.Subst["__SUMMARY__"]
2180
2181     ###########################################################################
2182
2183     def remove(self, from_dir=None):
2184         """
2185         Used (for instance) in p-u to remove the package from unchecked
2186
2187         Also removes the package from holding area.
2188         """
2189         if from_dir is None:
2190             from_dir = self.pkg.directory
2191         h = Holding()
2192
2193         for f in self.pkg.files.keys():
2194             os.unlink(os.path.join(from_dir, f))
2195             if os.path.exists(os.path.join(h.holding_dir, f)):
2196                 os.unlink(os.path.join(h.holding_dir, f))
2197
2198         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2199         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2200             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2201
2202     ###########################################################################
2203
2204     def move_to_queue (self, queue):
2205         """
2206         Move files to a destination queue using the permissions in the table
2207         """
2208         h = Holding()
2209         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2210                    queue.path, perms=int(queue.change_perms, 8))
2211         for f in self.pkg.files.keys():
2212             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2213
2214     ###########################################################################
2215
2216     def force_reject(self, reject_files):
2217         """
2218         Forcefully move files from the current directory to the
2219         reject directory.  If any file already exists in the reject
2220         directory it will be moved to the morgue to make way for
2221         the new file.
2222
2223         @type reject_files: dict
2224         @param reject_files: file dictionary
2225
2226         """
2227
2228         cnf = Config()
2229
2230         for file_entry in reject_files:
2231             # Skip any files which don't exist or which we don't have permission to copy.
2232             if os.access(file_entry, os.R_OK) == 0:
2233                 continue
2234
2235             dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2236
2237             try:
2238                 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2239             except OSError, e:
2240                 # File exists?  Let's find a new name by adding a number
2241                 if e.errno == errno.EEXIST:
2242                     try:
2243                         dest_file = utils.find_next_free(dest_file, 255)
2244                     except NoFreeFilenameError:
2245                         # Something's either gone badly Pete Tong, or
2246                         # someone is trying to exploit us.
2247                         utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2248                         return
2249
2250                     # Make sure we really got it
2251                     try:
2252                         dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2253                     except OSError, e:
2254                         # Likewise
2255                         utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2256                         return
2257                 else:
2258                     raise
2259             # If we got here, we own the destination file, so we can
2260             # safely overwrite it.
2261             utils.move(file_entry, dest_file, 1, perms=0660)
2262             os.close(dest_fd)
2263
2264     ###########################################################################
2265     def do_reject (self, manual=0, reject_message="", notes=""):
2266         """
2267         Reject an upload. If called without a reject message or C{manual} is
2268         true, spawn an editor so the user can write one.
2269
2270         @type manual: bool
2271         @param manual: manual or automated rejection
2272
2273         @type reject_message: string
2274         @param reject_message: A reject message
2275
2276         @return: 0
2277
2278         """
2279         # If we weren't given a manual rejection message, spawn an
2280         # editor so the user can add one in...
2281         if manual and not reject_message:
2282             (fd, temp_filename) = utils.temp_filename()
2283             temp_file = os.fdopen(fd, 'w')
2284             if len(notes) > 0:
2285                 for note in notes:
2286                     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2287                                     % (note.author, note.version, note.notedate, note.comment))
2288             temp_file.close()
2289             editor = os.environ.get("EDITOR","vi")
2290             answer = 'E'
2291             while answer == 'E':
2292                 os.system("%s %s" % (editor, temp_filename))
2293                 temp_fh = utils.open_file(temp_filename)
2294                 reject_message = "".join(temp_fh.readlines())
2295                 temp_fh.close()
2296                 print "Reject message:"
2297                 print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
2298                 prompt = "[R]eject, Edit, Abandon, Quit ?"
2299                 answer = "XXX"
2300                 while prompt.find(answer) == -1:
2301                     answer = utils.our_raw_input(prompt)
2302                     m = re_default_answer.search(prompt)
2303                     if answer == "":
2304                         answer = m.group(1)
2305                     answer = answer[:1].upper()
2306             os.unlink(temp_filename)
2307             if answer == 'A':
2308                 return 1
2309             elif answer == 'Q':
2310                 sys.exit(0)
2311
2312         print "Rejecting.\n"
2313
2314         cnf = Config()
2315
2316         reason_filename = self.pkg.changes_file[:-8] + ".reason"
2317         reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2318
2319         # Move all the files into the reject directory
2320         reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2321         self.force_reject(reject_files)
2322
2323         # If we fail here someone is probably trying to exploit the race
2324         # so let's just raise an exception ...
2325         if os.path.exists(reason_filename):
2326             os.unlink(reason_filename)
2327         reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2328
2329         rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2330
2331         self.update_subst()
2332         if not manual:
2333             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2334             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2335             self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2336             os.write(reason_fd, reject_message)
2337             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2338         else:
2339             # Build up the rejection email
2340             user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2341             self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2342             self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2343             self.Subst["__REJECT_MESSAGE__"] = ""
2344             self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2345             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2346             # Write the rejection email out as the <foo>.reason file
2347             os.write(reason_fd, reject_mail_message)
2348
2349         del self.Subst["__REJECTOR_ADDRESS__"]
2350         del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2351         del self.Subst["__CC__"]
2352
2353         os.close(reason_fd)
2354
2355         # Send the rejection mail
2356         utils.send_mail(reject_mail_message)
2357
2358         if self.logger:
2359             self.logger.log(["rejected", self.pkg.changes_file])
2360
2361         return 0
2362
2363     ################################################################################
2364     def in_override_p(self, package, component, suite, binary_type, filename, session):
2365         """
2366         Check if a package already has override entries in the DB
2367
2368         @type package: string
2369         @param package: package name
2370
2371         @type component: string
2372         @param component: database id of the component
2373
2374         @type suite: int
2375         @param suite: database id of the suite
2376
2377         @type binary_type: string
2378         @param binary_type: type of the package
2379
2380         @type filename: string
2381         @param filename: filename we check
2382
2383         @return: the database result. But noone cares anyway.
2384
2385         """
2386
2387         cnf = Config()
2388
2389         if binary_type == "": # must be source
2390             file_type = "dsc"
2391         else:
2392             file_type = binary_type
2393
2394         # Override suite name; used for example with proposed-updates
2395         oldsuite = get_suite(suite, session)
2396         if (not oldsuite is None) and oldsuite.overridesuite:
2397             suite = oldsuite.overridesuite
2398
2399         result = get_override(package, suite, component, file_type, session)
2400
2401         # If checking for a source package fall back on the binary override type
2402         if file_type == "dsc" and len(result) < 1:
2403             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2404
2405         # Remember the section and priority so we can check them later if appropriate
2406         if len(result) > 0:
2407             result = result[0]
2408             self.pkg.files[filename]["override section"] = result.section.section
2409             self.pkg.files[filename]["override priority"] = result.priority.priority
2410             return result
2411
2412         return None
2413
2414     ################################################################################
2415     def get_anyversion(self, sv_list, suite):
2416         """
2417         @type sv_list: list
2418         @param sv_list: list of (suite, version) tuples to check
2419
2420         @type suite: string
2421         @param suite: suite name
2422
2423         Description: TODO
2424         """
2425         Cnf = Config()
2426         anyversion = None
2427         anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2428         for (s, v) in sv_list:
2429             if s in [ x.lower() for x in anysuite ]:
2430                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2431                     anyversion = v
2432
2433         return anyversion
2434
2435     ################################################################################
2436
2437     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2438         """
2439         @type sv_list: list
2440         @param sv_list: list of (suite, version) tuples to check
2441
2442         @type filename: string
2443         @param filename: XXX
2444
2445         @type new_version: string
2446         @param new_version: XXX
2447
2448         Ensure versions are newer than existing packages in target
2449         suites and that cross-suite version checking rules as
2450         set out in the conf file are satisfied.
2451         """
2452
2453         cnf = Config()
2454
2455         # Check versions for each target suite
2456         for target_suite in self.pkg.changes["distribution"].keys():
2457             must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2458             must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2459
2460             # Enforce "must be newer than target suite" even if conffile omits it
2461             if target_suite not in must_be_newer_than:
2462                 must_be_newer_than.append(target_suite)
2463
2464             for (suite, existent_version) in sv_list:
2465                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2466
2467                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2468                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2469
2470                 if suite in must_be_older_than and vercmp > -1:
2471                     cansave = 0
2472
2473                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2474                         # we really use the other suite, ignoring the conflicting one ...
2475                         addsuite = self.pkg.changes["distribution-version"][suite]
2476
2477                         add_version = self.get_anyversion(sv_list, addsuite)
2478                         target_version = self.get_anyversion(sv_list, target_suite)
2479
2480                         if not add_version:
2481                             # not add_version can only happen if we map to a suite
2482                             # that doesn't enhance the suite we're propup'ing from.
2483                             # so "propup-ver x a b c; map a d" is a problem only if
2484                             # d doesn't enhance a.
2485                             #
2486                             # i think we could always propagate in this case, rather
2487                             # than complaining. either way, this isn't a REJECT issue
2488                             #
2489                             # And - we really should complain to the dorks who configured dak
2490                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2491                             self.pkg.changes.setdefault("propdistribution", {})
2492                             self.pkg.changes["propdistribution"][addsuite] = 1
2493                             cansave = 1
2494                         elif not target_version:
2495                             # not targets_version is true when the package is NEW
2496                             # we could just stick with the "...old version..." REJECT
2497                             # for this, I think.
2498                             self.rejects.append("Won't propogate NEW packages.")
2499                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2500                             # propogation would be redundant. no need to reject though.
2501                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2502                             cansave = 1
2503                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2504                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2505                             # propogate!!
2506                             self.warnings.append("Propogating upload to %s" % (addsuite))
2507                             self.pkg.changes.setdefault("propdistribution", {})
2508                             self.pkg.changes["propdistribution"][addsuite] = 1
2509                             cansave = 1
2510
2511                     if not cansave:
2512                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2513
2514     ################################################################################
2515     def check_binary_against_db(self, filename, session):
2516         # Ensure version is sane
2517         self.cross_suite_version_check( \
2518             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2519                 self.pkg.files[filename]["architecture"], session),
2520             filename, self.pkg.files[filename]["version"], sourceful=False)
2521
2522         # Check for any existing copies of the file
2523         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2524         q = q.filter_by(version=self.pkg.files[filename]["version"])
2525         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2526
2527         if q.count() > 0:
2528             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2529
2530     ################################################################################
2531
2532     def check_source_against_db(self, filename, session):
2533         source = self.pkg.dsc.get("source")
2534         version = self.pkg.dsc.get("version")
2535
2536         # Ensure version is sane
2537         self.cross_suite_version_check( \
2538             get_suite_version_by_source(source, session), filename, version,
2539             sourceful=True)
2540
2541     ################################################################################
2542     def check_dsc_against_db(self, filename, session):
2543         """
2544
2545         @warning: NB: this function can remove entries from the 'files' index [if
2546          the orig tarball is a duplicate of the one in the archive]; if
2547          you're iterating over 'files' and call this function as part of
2548          the loop, be sure to add a check to the top of the loop to
2549          ensure you haven't just tried to dereference the deleted entry.
2550
2551         """
2552
2553         Cnf = Config()
2554         self.pkg.orig_files = {} # XXX: do we need to clear it?
2555         orig_files = self.pkg.orig_files
2556
2557         # Try and find all files mentioned in the .dsc.  This has
2558         # to work harder to cope with the multiple possible
2559         # locations of an .orig.tar.gz.
2560         # The ordering on the select is needed to pick the newest orig
2561         # when it exists in multiple places.
2562         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2563             found = None
2564             if self.pkg.files.has_key(dsc_name):
2565                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2566                 actual_size = int(self.pkg.files[dsc_name]["size"])
2567                 found = "%s in incoming" % (dsc_name)
2568
2569                 # Check the file does not already exist in the archive
2570                 ql = get_poolfile_like_name(dsc_name, session)
2571
2572                 # Strip out anything that isn't '%s' or '/%s$'
2573                 for i in ql:
2574                     if not i.filename.endswith(dsc_name):
2575                         ql.remove(i)
2576
2577                 # "[dak] has not broken them.  [dak] has fixed a
2578                 # brokenness.  Your crappy hack exploited a bug in
2579                 # the old dinstall.
2580                 #
2581                 # "(Come on!  I thought it was always obvious that
2582                 # one just doesn't release different files with
2583                 # the same name and version.)"
2584                 #                        -- ajk@ on d-devel@l.d.o
2585
2586                 if len(ql) > 0:
2587                     # Ignore exact matches for .orig.tar.gz
2588                     match = 0
2589                     if re_is_orig_source.match(dsc_name):
2590                         for i in ql:
2591                             if self.pkg.files.has_key(dsc_name) and \
2592                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2593                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2594                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2595                                 # TODO: Don't delete the entry, just mark it as not needed
2596                                 # This would fix the stupidity of changing something we often iterate over
2597                                 # whilst we're doing it
2598                                 del self.pkg.files[dsc_name]
2599                                 dsc_entry["files id"] = i.file_id
2600                                 if not orig_files.has_key(dsc_name):
2601                                     orig_files[dsc_name] = {}
2602                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2603                                 match = 1
2604
2605                                 # Don't bitch that we couldn't find this file later
2606                                 try:
2607                                     self.later_check_files.remove(dsc_name)
2608                                 except ValueError:
2609                                     pass
2610
2611
2612                     if not match:
2613                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2614
2615             elif re_is_orig_source.match(dsc_name):
2616                 # Check in the pool
2617                 ql = get_poolfile_like_name(dsc_name, session)
2618
2619                 # Strip out anything that isn't '%s' or '/%s$'
2620                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2621                 for i in ql:
2622                     if not i.filename.endswith(dsc_name):
2623                         ql.remove(i)
2624
2625                 if len(ql) > 0:
2626                     # Unfortunately, we may get more than one match here if,
2627                     # for example, the package was in potato but had an -sa
2628                     # upload in woody.  So we need to choose the right one.
2629
2630                     # default to something sane in case we don't match any or have only one
2631                     x = ql[0]
2632
2633                     if len(ql) > 1:
2634                         for i in ql:
2635                             old_file = os.path.join(i.location.path, i.filename)
2636                             old_file_fh = utils.open_file(old_file)
2637                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2638                             old_file_fh.close()
2639                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2640                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2641                                 x = i
2642
2643                     old_file = os.path.join(i.location.path, i.filename)
2644                     old_file_fh = utils.open_file(old_file)
2645                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2646                     old_file_fh.close()
2647                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2648                     found = old_file
2649                     suite_type = x.location.archive_type
2650                     # need this for updating dsc_files in install()
2651                     dsc_entry["files id"] = x.file_id
2652                     # See install() in process-accepted...
2653                     if not orig_files.has_key(dsc_name):
2654                         orig_files[dsc_name] = {}
2655                     orig_files[dsc_name]["id"] = x.file_id
2656                     orig_files[dsc_name]["path"] = old_file
2657                     orig_files[dsc_name]["location"] = x.location.location_id
2658                 else:
2659                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2660                     # Not there? Check the queue directories...
2661                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2662                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2663                             continue
2664                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2665                         if os.path.exists(in_otherdir):
2666                             in_otherdir_fh = utils.open_file(in_otherdir)
2667                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2668                             in_otherdir_fh.close()
2669                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2670                             found = in_otherdir
2671                             if not orig_files.has_key(dsc_name):
2672                                 orig_files[dsc_name] = {}
2673                             orig_files[dsc_name]["path"] = in_otherdir
2674
2675                     if not found:
2676                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2677                         continue
2678             else:
2679                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2680                 continue
2681             if actual_md5 != dsc_entry["md5sum"]:
2682                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2683             if actual_size != int(dsc_entry["size"]):
2684                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2685
2686     ################################################################################
2687     # This is used by process-new and process-holding to recheck a changes file
2688     # at the time we're running.  It mainly wraps various other internal functions
2689     # and is similar to accepted_checks - these should probably be tidied up
2690     # and combined
2691     def recheck(self, session):
2692         cnf = Config()
2693         for f in self.pkg.files.keys():
2694             # The .orig.tar.gz can disappear out from under us is it's a
2695             # duplicate of one in the archive.
2696             if not self.pkg.files.has_key(f):
2697                 continue
2698
2699             entry = self.pkg.files[f]
2700
2701             # Check that the source still exists
2702             if entry["type"] == "deb":
2703                 source_version = entry["source version"]
2704                 source_package = entry["source package"]
2705                 if not self.pkg.changes["architecture"].has_key("source") \
2706                    and not source_exists(source_package, source_version, \
2707                     suites = self.pkg.changes["distribution"].keys(), session = session):
2708                     source_epochless_version = re_no_epoch.sub('', source_version)
2709                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2710                     found = False
2711                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2712                         if cnf.has_key("Dir::Queue::%s" % (q)):
2713                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2714                                 found = True
2715                     if not found:
2716                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2717
2718             # Version and file overwrite checks
2719             if entry["type"] == "deb":
2720                 self.check_binary_against_db(f, session)
2721             elif entry["type"] == "dsc":
2722                 self.check_source_against_db(f, session)
2723                 self.check_dsc_against_db(f, session)
2724
2725     ################################################################################
2726     def accepted_checks(self, overwrite_checks, session):
2727         # Recheck anything that relies on the database; since that's not
2728         # frozen between accept and our run time when called from p-a.
2729
2730         # overwrite_checks is set to False when installing to stable/oldstable
2731
2732         propogate={}
2733         nopropogate={}
2734
2735         # Find the .dsc (again)
2736         dsc_filename = None
2737         for f in self.pkg.files.keys():
2738             if self.pkg.files[f]["type"] == "dsc":
2739                 dsc_filename = f
2740
2741         for checkfile in self.pkg.files.keys():
2742             # The .orig.tar.gz can disappear out from under us is it's a
2743             # duplicate of one in the archive.
2744             if not self.pkg.files.has_key(checkfile):
2745                 continue
2746
2747             entry = self.pkg.files[checkfile]
2748
2749             # Check that the source still exists
2750             if entry["type"] == "deb":
2751                 source_version = entry["source version"]
2752                 source_package = entry["source package"]
2753                 if not self.pkg.changes["architecture"].has_key("source") \
2754                    and not source_exists(source_package, source_version, \
2755                     suites = self.pkg.changes["distribution"].keys(), \
2756                     session = session):
2757                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2758
2759             # Version and file overwrite checks
2760             if overwrite_checks:
2761                 if entry["type"] == "deb":
2762                     self.check_binary_against_db(checkfile, session)
2763                 elif entry["type"] == "dsc":
2764                     self.check_source_against_db(checkfile, session)
2765                     self.check_dsc_against_db(dsc_filename, session)
2766
2767             # propogate in the case it is in the override tables:
2768             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2769                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2770                     propogate[suite] = 1
2771                 else:
2772                     nopropogate[suite] = 1
2773
2774         for suite in propogate.keys():
2775             if suite in nopropogate:
2776                 continue
2777             self.pkg.changes["distribution"][suite] = 1
2778
2779         for checkfile in self.pkg.files.keys():
2780             # Check the package is still in the override tables
2781             for suite in self.pkg.changes["distribution"].keys():
2782                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2783                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2784
2785     ################################################################################
2786     # If any file of an upload has a recent mtime then chances are good
2787     # the file is still being uploaded.
2788
2789     def upload_too_new(self):
2790         cnf = Config()
2791         too_new = False
2792         # Move back to the original directory to get accurate time stamps
2793         cwd = os.getcwd()
2794         os.chdir(self.pkg.directory)
2795         file_list = self.pkg.files.keys()
2796         file_list.extend(self.pkg.dsc_files.keys())
2797         file_list.append(self.pkg.changes_file)
2798         for f in file_list:
2799             try:
2800                 last_modified = time.time()-os.path.getmtime(f)
2801                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2802                     too_new = True
2803                     break
2804             except:
2805                 pass
2806
2807         os.chdir(cwd)
2808         return too_new
2809
2810     def store_changelog(self):
2811
2812         # Skip binary-only upload if it is not a bin-NMU
2813         if not self.pkg.changes['architecture'].has_key('source'):
2814             from daklib.regexes import re_bin_only_nmu
2815             if not re_bin_only_nmu.search(self.pkg.changes['version']):
2816                 return
2817
2818         session = DBConn().session()
2819
2820         # Check if upload already has a changelog entry
2821         query = """SELECT changelog_id FROM changes WHERE source = :source
2822                    AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2823         if session.execute(query, {'source': self.pkg.changes['source'], \
2824                                    'version': self.pkg.changes['version'], \
2825                                    'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2826             session.commit()
2827             return
2828
2829         # Add current changelog text into changelogs_text table, return created ID
2830         query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2831         ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2832
2833         # Link ID to the upload available in changes table
2834         query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2835                    AND version = :version AND architecture = :architecture"""
2836         session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2837                                 'version': self.pkg.changes['version'], \
2838                                 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
2839
2840         session.commit()