# daklib/queue.py — from dak.git (git.decadent.org.uk); commit subject: "Stop using Dir::Queue::"
1 #!/usr/bin/env python
2 # vim:set et sw=4:
3
4 """
5 Queue utility functions for dak
6
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010  Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
11 """
12
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
17
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21 # GNU General Public License for more details.
22
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26
27 ###############################################################################
28
29 import errno
30 import os
31 import stat
32 import sys
33 import time
34 import apt_inst
35 import apt_pkg
36 import utils
37 import commands
38 import shutil
39 import textwrap
40 from types import *
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
43
44 import yaml
45
46 from dak_exceptions import *
47 from changes import *
48 from regexes import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
52 from dbconn import *
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
58
59 ###############################################################################
60
def get_type(f, session):
    """
    Determine and validate the file type of C{f}.

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Work out the type from the available metadata.
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    elif f['architecture'] == 'source' and f["type"] == 'unreadable':
        # Unreadable source: warn and hand back the raw type unchanged.
        utils.warn('unreadable source file (will continue and hope for the best)')
        return f["type"]
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Make sure an override type exists for what we determined.
    if get_override_type(file_type, session) is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
93
94 ################################################################################
95
96 # Determine what parts in a .changes are NEW
97
98 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
99     """
100     Determine what parts in a C{changes} file are NEW.
101
102     @type filename: str
103     @param filename: changes filename
104
105     @type changes: Upload.Pkg.changes dict
106     @param changes: Changes dictionary
107
108     @type files: Upload.Pkg.files dict
109     @param files: Files dictionary
110
111     @type warn: bool
112     @param warn: Warn if overrides are added for (old)stable
113
114     @type dsc: Upload.Pkg.dsc dict
115     @param dsc: (optional); Dsc dictionary
116
117     @type new: dict
118     @param new: new packages as returned by a previous call to this function, but override information may have changed
119
120     @rtype: dict
121     @return: dictionary of NEW components.
122
123     """
124     # TODO: This should all use the database instead of parsing the changes
125     # file again
126     byhand = {}
127     if new is None:
128         new = {}
129
130     dbchg = get_dbchange(filename, session)
131     if dbchg is None:
132         print "Warning: cannot find changes file in database; won't check byhand"
133
134     # Try to get the Package-Set field from an included .dsc file (if possible).
135     if dsc:
136         for package, entry in build_package_set(dsc, session).items():
137             if not new.has_key(package):
138                 new[package] = entry
139
140     # Build up a list of potentially new things
141     for name, f in files.items():
142         # Keep a record of byhand elements
143         if f["section"] == "byhand":
144             byhand[name] = 1
145             continue
146
147         pkg = f["package"]
148         priority = f["priority"]
149         section = f["section"]
150         file_type = get_type(f, session)
151         component = f["component"]
152
153         if file_type == "dsc":
154             priority = "source"
155
156         if not new.has_key(pkg):
157             new[pkg] = {}
158             new[pkg]["priority"] = priority
159             new[pkg]["section"] = section
160             new[pkg]["type"] = file_type
161             new[pkg]["component"] = component
162             new[pkg]["files"] = []
163         else:
164             old_type = new[pkg]["type"]
165             if old_type != file_type:
166                 # source gets trumped by deb or udeb
167                 if old_type == "dsc":
168                     new[pkg]["priority"] = priority
169                     new[pkg]["section"] = section
170                     new[pkg]["type"] = file_type
171                     new[pkg]["component"] = component
172
173         new[pkg]["files"].append(name)
174
175         if f.has_key("othercomponents"):
176             new[pkg]["othercomponents"] = f["othercomponents"]
177
178     # Fix up the list of target suites
179     cnf = Config()
180     for suite in changes["suite"].keys():
181         oldsuite = get_suite(suite, session)
182         if not oldsuite:
183             print "WARNING: Invalid suite %s found" % suite
184             continue
185
186         if oldsuite.overridesuite:
187             newsuite = get_suite(oldsuite.overridesuite, session)
188
189             if newsuite:
190                 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
191                     oldsuite.overridesuite, suite)
192                 del changes["suite"][suite]
193                 changes["suite"][oldsuite.overridesuite] = 1
194             else:
195                 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist.  Bugger" % (
196                     oldsuite.overridesuite, suite)
197
198     # Check for unprocessed byhand files
199     if dbchg is not None:
200         for b in byhand.keys():
201             # Find the file entry in the database
202             found = False
203             for f in dbchg.files:
204                 if f.filename == b:
205                     found = True
206                     # If it's processed, we can ignore it
207                     if f.processed:
208                         del byhand[b]
209                     break
210
211             if not found:
212                 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
213
214     # Check for new stuff
215     for suite in changes["suite"].keys():
216         for pkg in new.keys():
217             ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
218             if len(ql) > 0:
219                 for file_entry in new[pkg]["files"]:
220                     if files[file_entry].has_key("new"):
221                         del files[file_entry]["new"]
222                 del new[pkg]
223
224     if warn:
225         for s in ['stable', 'oldstable']:
226             if changes["suite"].has_key(s):
227                 print "WARNING: overrides will be added for %s!" % s
228         for pkg in new.keys():
229             if new[pkg].has_key("othercomponents"):
230                 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
231
232     return new, byhand
233
234 ################################################################################
235
def check_valid(new, session = None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    An invalid section or priority is flagged by setting the corresponding
    "section id" / "priority id" entry to -1.

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Map the section name to its database id; -1 marks an unknown section.
        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Likewise for the priority.
        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if     (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # Bug fix: compare the priority *name*, not the ORM object returned
        # by get_priority() -- the object never equals the string "source",
        # so the original check marked every dsc's priority invalid.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
277
278 ###############################################################################
279
280 # Used by Upload.check_timestamps
# Used by Upload.check_timestamps
class TarTime(object):
    """
    Collect tar members whose mtime falls outside a plausible window.

    Used as an extraction callback: members newer than C{future_cutoff} are
    recorded in C{future_files}, members older than C{past_cutoff} in
    C{ancient_files}, both as {name: mtime} maps.
    """
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        """Clear the accumulated file maps."""
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, member, data):
        """
        Record C{member} (a tar entry exposing C{name} and C{mtime}) if its
        mtime is outside the configured cutoffs.  C{data} is unused.
        """
        if member.mtime > self.future_cutoff:
            # Bug fix: original wrote self.future_files[Name] = MTime with
            # undefined names, raising NameError whenever this triggered.
            self.future_files[member.name] = member.mtime
        if member.mtime < self.past_cutoff:
            self.ancient_files[member.name] = member.mtime
296
297 ###############################################################################
298
def prod_maintainer(notes, upload):
    """
    Interactively compose and send a "prod" mail about an upload.

    @type notes: list
    @param notes: objects with a C{comment} attribute used to seed the editor

    @type upload: Upload
    @param upload: upload whose C{Subst} map supplies mail addressing
    """
    cnf = Config()

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    for note in notes:
        temp_file.write(note.comment)
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # Outer loop: 'E' (edit) re-opens the editor; anything else falls through.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message,"  ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        # Inner loop: keep prompting until the (upper-cased, first-letter)
        # answer matches one of the letters in the prompt; empty input takes
        # the bracketed default.
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        # Abandon: send nothing.
        return
    elif answer == 'Q':
        end()
        sys.exit(0)
    # Otherwise, do the proding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    Subst = upload.Subst

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    # Expand the process-new.prod template with the substitution map.
    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message)

    print "Sent prodding message"
348
349 ################################################################################
350
def edit_note(note, upload, session, trainee=False):
    """
    Interactively edit and store a NEW-processing note for an upload.

    @type note: object
    @param note: NOTE(review): unused in this function; kept for interface
        compatibility with callers

    @type upload: Upload
    @param upload: upload the note is attached to (source/version taken from
        its changes data)

    @type session: SQLA Session
    @param session: session used to persist the NewComment

    @type trainee: bool
    @param trainee: whether the author is a trainee (stored on the comment)
    """
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    # 'E' (edit) re-opens the editor on the temp file.
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote,"  ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        # Prompt until a letter from the prompt is given; empty input takes
        # the bracketed default.
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        # Abandon: store nothing.
        return
    elif answer == 'Q':
        end()
        sys.exit(0)

    # Persist the edited note as a NewComment row.
    comment = NewComment()
    comment.package = upload.pkg.changes["source"]
    comment.version = upload.pkg.changes["version"]
    comment.comment = newnote
    comment.author  = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
386
387 ###############################################################################
388
389 # suite names DMs can upload to
390 dm_suites = ['unstable', 'experimental']
391
def get_newest_source(source, session):
    'returns the newest DBSource object in dm_suites'
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    query = session.query(DBSource)
    query = query.filter_by(source = source)
    query = query.filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites)))
    query = query.order_by(desc('source.version'))
    return query.first()
401
def get_suite_version_by_source(source, session):
    'returns a list of tuples (suite_name, version) for source package'
    query = session.query(Suite.suite_name, DBSource.version)
    query = query.join(Suite.sources).filter_by(source = source)
    return query.all()
407
def get_source_by_package_and_suite(package, suite_name, session):
    '''
    returns a DBSource query filtered by DBBinary.package and this package's
    suite_name
    '''
    query = session.query(DBSource)
    query = query.join(DBSource.binaries).filter_by(package = package)
    return query.join(DBBinary.suites).filter_by(suite_name = suite_name)
416
def get_suite_version_by_package(package, arch_string, session):
    '''
    returns a list of tuples (suite_name, version) for binary package and
    arch_string
    '''
    query = session.query(Suite.suite_name, DBBinary.version)
    query = query.join(Suite.binaries).filter_by(package = package)
    query = query.join(DBBinary.architecture)
    return query.filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
426
427 class Upload(object):
428     """
429     Everything that has to do with an upload processed.
430
431     """
    def __init__(self):
        # Logging hook; stays None until a caller assigns one.
        self.logger = None
        # Changes object holding the parsed .changes data for this upload.
        self.pkg = Changes()
        # reset() builds the Subst map and clears rejects/warnings/notes.
        self.reset()
436
437     ###########################################################################
438
    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        if cnf.has_key("Dinstall::BugServer"):
            self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        # Per-upload message accumulators (formatted by package_info()).
        self.rejects = []
        self.warnings = []
        self.notes = []

        # Files whose checks are deferred until all files are available.
        self.later_check_files = []

        # Also reset the underlying Changes object.
        self.pkg.reset()
458
459     def package_info(self):
460         """
461         Format various messages from this Upload to send to the maintainer.
462         """
463
464         msgs = (
465             ('Reject Reasons', self.rejects),
466             ('Warnings', self.warnings),
467             ('Notes', self.notes),
468         )
469
470         msg = ''
471         for title, messages in msgs:
472             if messages:
473                 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
474         msg += '\n\n'
475
476         return msg
477
478     ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        # (Changed-By only wins when it is non-empty and differs from Maintainer.)
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable.  Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            # Sponsored uploads also get a copy sent to the sponsor's address.
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        # Bcc the package tracking server if one is configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
535
536     ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and setup a dictionary around it. Also checks for mandantory
        fields  within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()    # NOTE(review): Cnf appears unused in this method
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        # (e.g. "amd64 i386" becomes {"amd64": 1, "i386": 1}).
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            # On failure, blank the derived fields so later code sees
            # consistent (empty) values rather than stale/missing keys.
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
648
649     ###########################################################################
650
    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings.  Each SuiteMappings entry is a whitespace
        # separated directive whose first word selects the mapping type.
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                # "map SRC DEST": replace SRC with DEST in the target suites
                # ("silent-map" does the same without adding a note).
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                # "map-unreleased SRC DEST": map SRC to DEST when the upload
                # contains an architecture SRC does not build for.
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                # "ignore SUITE": silently drop SUITE from the target list
                # (with a warning to the uploader).
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                # "reject SUITE": uploads targeting SUITE are rejected outright.
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not get_suite(suite.lower()):
                self.rejects.append("Unknown distribution `%s'." % (suite))
705
706     ###########################################################################
707
708     def binary_file_checks(self, f, session):
709         cnf = Config()
710         entry = self.pkg.files[f]
711
712         # Extract package control information
713         deb_file = utils.open_file(f)
714         try:
715             control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
716         except:
717             self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
718             deb_file.close()
719             # Can't continue, none of the checks on control would work.
720             return
721
722         # Check for mandantory "Description:"
723         deb_file.seek(0)
724         try:
725             apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
726         except:
727             self.rejects.append("%s: Missing Description in binary package" % (f))
728             return
729
730         deb_file.close()
731
732         # Check for mandatory fields
733         for field in [ "Package", "Architecture", "Version" ]:
734             if control.Find(field) == None:
735                 # Can't continue
736                 self.rejects.append("%s: No %s field in control." % (f, field))
737                 return
738
739         # Ensure the package name matches the one give in the .changes
740         if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
741             self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
742
743         # Validate the package field
744         package = control.Find("Package")
745         if not re_valid_pkg_name.match(package):
746             self.rejects.append("%s: invalid package name '%s'." % (f, package))
747
748         # Validate the version field
749         version = control.Find("Version")
750         if not re_valid_version.match(version):
751             self.rejects.append("%s: invalid version number '%s'." % (f, version))
752
753         # Ensure the architecture of the .deb is one we know about.
754         default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
755         architecture = control.Find("Architecture")
756         upload_suite = self.pkg.changes["distribution"].keys()[0]
757
758         if      architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
759             and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
760             self.rejects.append("Unknown architecture '%s'." % (architecture))
761
762         # Ensure the architecture of the .deb is one of the ones
763         # listed in the .changes.
764         if not self.pkg.changes["architecture"].has_key(architecture):
765             self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
766
767         # Sanity-check the Depends field
768         depends = control.Find("Depends")
769         if depends == '':
770             self.rejects.append("%s: Depends field is empty." % (f))
771
772         # Sanity-check the Provides field
773         provides = control.Find("Provides")
774         if provides:
775             provide = re_spacestrip.sub('', provides)
776             if provide == '':
777                 self.rejects.append("%s: Provides field is empty." % (f))
778             prov_list = provide.split(",")
779             for prov in prov_list:
780                 if not re_valid_pkg_name.match(prov):
781                     self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
782
783         # If there is a Built-Using field, we need to check we can find the
784         # exact source version
785         built_using = control.Find("Built-Using")
786         if built_using:
787             try:
788                 entry["built-using"] = []
789                 for dep in apt_pkg.parse_depends(built_using):
790                     bu_s, bu_v, bu_e = dep[0]
791                     # Check that it's an exact match dependency and we have
792                     # some form of version
793                     if bu_e != "=" or len(bu_v) < 1:
794                         self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
795                     else:
796                         # Find the source id for this version
797                         bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
798                         if len(bu_so) != 1:
799                             self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
800                         else:
801                             entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
802
803             except ValueError, e:
804                 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
805
806
807         # Check the section & priority match those given in the .changes (non-fatal)
808         if     control.Find("Section") and entry["section"] != "" \
809            and entry["section"] != control.Find("Section"):
810             self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
811                                 (f, control.Find("Section", ""), entry["section"]))
812         if control.Find("Priority") and entry["priority"] != "" \
813            and entry["priority"] != control.Find("Priority"):
814             self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
815                                 (f, control.Find("Priority", ""), entry["priority"]))
816
817         entry["package"] = package
818         entry["architecture"] = architecture
819         entry["version"] = version
820         entry["maintainer"] = control.Find("Maintainer", "")
821
822         if f.endswith(".udeb"):
823             self.pkg.files[f]["dbtype"] = "udeb"
824         elif f.endswith(".deb"):
825             self.pkg.files[f]["dbtype"] = "deb"
826         else:
827             self.rejects.append("%s is neither a .deb or a .udeb." % (f))
828
829         entry["source"] = control.Find("Source", entry["package"])
830
831         # Get the source version
832         source = entry["source"]
833         source_version = ""
834
835         if source.find("(") != -1:
836             m = re_extract_src_version.match(source)
837             source = m.group(1)
838             source_version = m.group(2)
839
840         if not source_version:
841             source_version = self.pkg.files[f]["version"]
842
843         entry["source package"] = source
844         entry["source version"] = source_version
845
846         # Ensure the filename matches the contents of the .deb
847         m = re_isadeb.match(f)
848
849         #  package name
850         file_package = m.group(1)
851         if entry["package"] != file_package:
852             self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
853                                 (f, file_package, entry["dbtype"], entry["package"]))
854         epochless_version = re_no_epoch.sub('', control.Find("Version"))
855
856         #  version
857         file_version = m.group(2)
858         if epochless_version != file_version:
859             self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
860                                 (f, file_version, entry["dbtype"], epochless_version))
861
862         #  architecture
863         file_architecture = m.group(3)
864         if entry["architecture"] != file_architecture:
865             self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
866                                 (f, file_architecture, entry["dbtype"], entry["architecture"]))
867
868         # Check for existent source
869         source_version = entry["source version"]
870         source_package = entry["source package"]
871         if self.pkg.changes["architecture"].has_key("source"):
872             if source_version != self.pkg.changes["version"]:
873                 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
874                                     (source_version, f, self.pkg.changes["version"]))
875         else:
876             # Check in the SQL database
877             if not source_exists(source_package, source_version, suites = \
878                 self.pkg.changes["distribution"].keys(), session = session):
879                 # Check in one of the other directories
880                 source_epochless_version = re_no_epoch.sub('', source_version)
881                 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
882
883                 byhand_dir = get_policy_queue('byhand', session).path
884                 new_dir = get_policy_queue('new', session).path
885
886                 if os.path.exists(os.path.join(byhand_dir, dsc_filename)):
887                     entry["byhand"] = 1
888                 elif os.path.exists(os.path.join(new_dir, dsc_filename)):
889                     entry["new"] = 1
890                 else:
891                     dsc_file_exists = False
892                     # TODO: Don't hardcode this list: use all relevant queues
893                     #       The question is how to determine what is relevant
894                     for queue_name in ["embargoed", "unembargoed", "proposedupdates", "oldproposedupdates"]:
895                         queue = get_policy_queue(queue_name, session)
896                         if queue:
897                             if os.path.exists(os.path.join(queue.path, dsc_filename)):
898                                 dsc_file_exists = True
899                                 break
900
901                     if not dsc_file_exists:
902                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
903
904         # Check the version and for file overwrites
905         self.check_binary_against_db(f, session)
906
907     def source_file_checks(self, f, session):
908         entry = self.pkg.files[f]
909
910         m = re_issource.match(f)
911         if not m:
912             return
913
914         entry["package"] = m.group(1)
915         entry["version"] = m.group(2)
916         entry["type"] = m.group(3)
917
918         # Ensure the source package name matches the Source filed in the .changes
919         if self.pkg.changes["source"] != entry["package"]:
920             self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
921
922         # Ensure the source version matches the version in the .changes file
923         if re_is_orig_source.match(f):
924             changes_version = self.pkg.changes["chopversion2"]
925         else:
926             changes_version = self.pkg.changes["chopversion"]
927
928         if changes_version != entry["version"]:
929             self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
930
931         # Ensure the .changes lists source in the Architecture field
932         if not self.pkg.changes["architecture"].has_key("source"):
933             self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
934
935         # Check the signature of a .dsc file
936         if entry["type"] == "dsc":
937             # check_signature returns either:
938             #  (None, [list, of, rejects]) or (signature, [])
939             (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
940             for j in rejects:
941                 self.rejects.append(j)
942
943         entry["architecture"] = "source"
944
    def per_suite_file_checks(self, f, suite, session):
        """
        Run per-target-suite checks for one upload file: component mapping
        and validation, NEW detection via the override table, priority
        sanity, pool location lookup and comparison against any existing
        pool copy.

        Problems are appended to self.rejects; the file's entry in
        self.pkg.files is annotated as a side effect (component,
        "location id", "files id", "new", "othercomponents", ...).

        @type f: string
        @param f: filename being checked

        @type suite: string
        @param suite: target suite name

        @param session: database session used for the lookups
        """
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings (e.g. remap a legacy component name to
        # its current one); the original name is preserved in the entry.
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if entry["component"] not in get_component_names(session):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        # NOTE(review): looks redundant with the name check just above --
        # confirm whether get_component() can fail where get_component_names()
        # succeeded before removing either check.
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW (i.e. has no override entry yet)
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        arch_list = [entry["architecture"], 'all']
        component = get_component_by_package_suite(self.pkg.files[f]['package'], \
            [suite], arch_list = arch_list, session = session)
        if component is not None:
            entry["othercomponents"] = component
1020
    def check_files(self, action=True):
        """
        Run all per-file checks for the upload.

        When action is True the files are first copied into the holding
        area.  Each file is then classified as binary / source / byhand,
        type-specific checks are dispatched, and per-suite checks follow.
        Problems accumulate in self.rejects / self.warnings.

        @type action: boolean
        @param action: whether to copy the files into holding first
            (False for dry runs, e.g. -n)
        """
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            # Copy everything into holding; a failed copy is not fatal yet,
            # the file may still be found in the pool/database later.
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if (not has_binaries) and (not cnf.FindB("Dinstall::AllowSourceOnlyUploads")):
                self.rejects.append("source only uploads are not supported.")
1119
1120     ###########################################################################
1121
1122     def __dsc_filename(self):
1123         """
1124         Returns: (Status, Dsc_Filename)
1125         where
1126           Status: Boolean; True when there was no error, False otherwise
1127           Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1128         """
1129         dsc_filename = None
1130
1131         # find the dsc
1132         for name, entry in self.pkg.files.items():
1133             if entry.has_key("type") and entry["type"] == "dsc":
1134                 if dsc_filename:
1135                     return False, "cannot process a .changes file with multiple .dsc's."
1136                 else:
1137                     dsc_filename = name
1138
1139         if not dsc_filename:
1140             return False, "source uploads must contain a dsc file"
1141
1142         return True, dsc_filename
1143
    def load_dsc(self, action=True, signing_rules=1):
        """
        Find and load the dsc from self.pkg.files into self.dsc

        Returns: (Status, Reason)
        where
          Status: Boolean; True when there was no error, False otherwise
          Reason: String; When Status is False this describes the error

        @type action: boolean
        @param action: False for dry runs; affects CantOpenError handling

        @param signing_rules: passed through to utils.parse_changes
        """

        # find the dsc
        (status, dsc_filename) = self.__dsc_filename()
        if not status:
            # If status is false, dsc_filename has the reason
            return False, dsc_filename

        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
        except CantOpenError:
            # NOTE(review): when action is True an unreadable dsc is silently
            # ignored here and we still return success -- presumably the file
            # is expected to turn up in holding/the pool later; confirm.
            if not action:
                return False, "%s: can't read file." % (dsc_filename)
        except ParseChangesError, line:
            return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
        except InvalidDscError, line:
            return False, "%s: syntax error on line %s." % (dsc_filename, line)
        except ChangesUnicodeError:
            return False, "%s: dsc file not proper utf-8." % (dsc_filename)

        return True, None
1173
1174     ###########################################################################
1175
1176     def check_dsc(self, action=True, session=None):
1177         """Returns bool indicating whether or not the source changes are valid"""
1178         # Ensure there is source to check
1179         if not self.pkg.changes["architecture"].has_key("source"):
1180             return True
1181
1182         (status, reason) = self.load_dsc(action=action)
1183         if not status:
1184             self.rejects.append(reason)
1185             return False
1186         (status, dsc_filename) = self.__dsc_filename()
1187         if not status:
1188             # If status is false, dsc_filename has the reason
1189             self.rejects.append(dsc_filename)
1190             return False
1191
1192         # Build up the file list of files mentioned by the .dsc
1193         try:
1194             self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1195         except NoFilesFieldError:
1196             self.rejects.append("%s: no Files: field." % (dsc_filename))
1197             return False
1198         except UnknownFormatError, format:
1199             self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1200             return False
1201         except ParseChangesError, line:
1202             self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1203             return False
1204
1205         # Enforce mandatory fields
1206         for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1207             if not self.pkg.dsc.has_key(i):
1208                 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1209                 return False
1210
1211         # Validate the source and version fields
1212         if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1213             self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1214         if not re_valid_version.match(self.pkg.dsc["version"]):
1215             self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1216
1217         # Only a limited list of source formats are allowed in each suite
1218         for dist in self.pkg.changes["distribution"].keys():
1219             allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1220             if self.pkg.dsc["format"] not in allowed:
1221                 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1222
1223         # Validate the Maintainer field
1224         try:
1225             # We ignore the return value
1226             fix_maintainer(self.pkg.dsc["maintainer"])
1227         except ParseMaintError, msg:
1228             self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1229                                  % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1230
1231         # Validate the build-depends field(s)
1232         for field_name in [ "build-depends", "build-depends-indep" ]:
1233             field = self.pkg.dsc.get(field_name)
1234             if field:
1235                 # Have apt try to parse them...
1236                 try:
1237                     apt_pkg.ParseSrcDepends(field)
1238                 except:
1239                     self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1240
1241         # Ensure the version number in the .dsc matches the version number in the .changes
1242         epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1243         changes_version = self.pkg.files[dsc_filename]["version"]
1244
1245         if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1246             self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1247
1248         # Ensure the Files field contain only what's expected
1249         self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1250
1251         # Ensure source is newer than existing source in target suites
1252         session = DBConn().session()
1253         self.check_source_against_db(dsc_filename, session)
1254         self.check_dsc_against_db(dsc_filename, session)
1255
1256         dbchg = get_dbchange(self.pkg.changes_file, session)
1257
1258         # Finally, check if we're missing any files
1259         for f in self.later_check_files:
1260             print 'XXX: %s' % f
1261             # Check if we've already processed this file if we have a dbchg object
1262             ok = False
1263             if dbchg:
1264                 for pf in dbchg.files:
1265                     if pf.filename == f and pf.processed:
1266                         self.notes.append('%s was already processed so we can go ahead' % f)
1267                         ok = True
1268                         del self.pkg.files[f]
1269             if not ok:
1270                 self.rejects.append("Could not find file %s references in changes" % f)
1271
1272         session.close()
1273
1274         return (len(self.rejects) == 0)
1275
1276     ###########################################################################
1277
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS.

        Symlinks the uploaded source files from source_dir (and any
        previously located orig tarballs from their recorded paths) into
        the current working directory, unpacks the source, and stores the
        recognised changelog versions in self.pkg.dsc["bts changelog"].
        Problems are appended to self.rejects.

        @type source_dir: string
        @param source_dir: directory containing the uploaded source files
        """

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                # Orig tarballs already located elsewhere (e.g. the pool)
                # are symlinked from their recorded path in the loop below.
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        # NOTE(review): bare except -- this also swallows KeyboardInterrupt
        # and SystemExit; consider narrowing to Exception.
        try:
            unpacked = UnpackedSource(dsc_filename)
        except:
            self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
            return

        # No BTS version-tracking directory configured: nothing more to do
        if not cnf.Find("Dir::BTSVersionTrack"):
            return

        # Get the upstream version
        # NOTE(review): upstr_version is computed but not used below --
        # confirm whether it is still needed.
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_file = unpacked.get_changelog_file()
        if changelog_file is None:
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()
        unpacked.cleanup()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1350
1351     def check_source(self):
1352         # Bail out if:
1353         #    a) there's no source
1354         if not self.pkg.changes["architecture"].has_key("source"):
1355             return
1356
1357         tmpdir = utils.temp_dirname()
1358
1359         # Move into the temporary directory
1360         cwd = os.getcwd()
1361         os.chdir(tmpdir)
1362
1363         # Get the changelog version history
1364         self.get_changelog_versions(cwd)
1365
1366         # Move back and cleanup the temporary tree
1367         os.chdir(cwd)
1368
1369         try:
1370             shutil.rmtree(tmpdir)
1371         except OSError, e:
1372             if e.errno != errno.EACCES:
1373                 print "foobar"
1374                 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1375
1376             self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1377             # We probably have u-r or u-w directories so chmod everything
1378             # and try again.
1379             cmd = "chmod -R u+rwx %s" % (tmpdir)
1380             result = os.system(cmd)
1381             if result != 0:
1382                 utils.fubar("'%s' failed with result %s." % (cmd, result))
1383             shutil.rmtree(tmpdir)
1384         except Exception, e:
1385             print "foobar2 (%s)" % e
1386             utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1387
1388     ###########################################################################
1389     def ensure_hashes(self):
1390         # Make sure we recognise the format of the Files: field in the .changes
1391         format = self.pkg.changes.get("format", "0.0").split(".", 1)
1392         if len(format) == 2:
1393             format = int(format[0]), int(format[1])
1394         else:
1395             format = int(float(format[0])), 0
1396
1397         # We need to deal with the original changes blob, as the fields we need
1398         # might not be in the changes dict serialised into the .dak anymore.
1399         orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1400
1401         # Copy the checksums over to the current changes dict.  This will keep
1402         # the existing modifications to it intact.
1403         for field in orig_changes:
1404             if field.startswith('checksums-'):
1405                 self.pkg.changes[field] = orig_changes[field]
1406
1407         # Check for unsupported hashes
1408         for j in utils.check_hash_fields(".changes", self.pkg.changes):
1409             self.rejects.append(j)
1410
1411         for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1412             self.rejects.append(j)
1413
1414         # We have to calculate the hash if we have an earlier changes version than
1415         # the hash appears in rather than require it exist in the changes file
1416         for hashname, hashfunc, version in utils.known_hashes:
1417             # TODO: Move _ensure_changes_hash into this class
1418             for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1419                 self.rejects.append(j)
1420             if "source" in self.pkg.changes["architecture"]:
1421                 # TODO: Move _ensure_dsc_hash into this class
1422                 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1423                     self.rejects.append(j)
1424
1425     def check_hashes(self):
1426         for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1427             self.rejects.append(m)
1428
1429         for m in utils.check_size(".changes", self.pkg.files):
1430             self.rejects.append(m)
1431
1432         for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1433             self.rejects.append(m)
1434
1435         for m in utils.check_size(".dsc", self.pkg.dsc_files):
1436             self.rejects.append(m)
1437
1438         self.ensure_hashes()
1439
1440     ###########################################################################
1441
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            # Symlink 'path' into target_dir under 'filename', but only when
            # its size and md5sum match the .dsc entry for this orig.
            # Returns True when the link was created.  Note this is a closure
            # over the 'entry' and 'filename' of the current loop iteration.
            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            # Use the caller's session when given; otherwise open our own
            # and close it again once the pool lookup is done.
            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                # Skip queues that are not configured on this installation.
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
1523
1524     ###########################################################################
1525
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if not tagfile:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            # NOTE(review): yaml.load on an operator-controlled config file;
            # yaml.safe_load would be safer if no custom tags are needed --
            # confirm before changing.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian: one tag name per line.
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        # NOTE(review): commands.getstatusoutput returns a wait()-style
        # encoded status, in which exit code 2 would appear as 512 --
        # confirm this comparison fires for the intended lintian failure.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        # Logging callback handed to generate_reject_messages below.
        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
1600
1601     ###########################################################################
1602     def check_urgency(self):
1603         cnf = Config()
1604         if self.pkg.changes["architecture"].has_key("source"):
1605             if not self.pkg.changes.has_key("urgency"):
1606                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1607             self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1608             if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1609                 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1610                                      (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1611                 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1612
1613     ###########################################################################
1614
1615     # Sanity check the time stamps of files inside debs.
1616     # [Files in the near future cause ugly warnings and extreme time
1617     #  travel can cause errors on extraction]
1618
1619     def check_timestamps(self):
1620         Cnf = Config()
1621
1622         future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1623         past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1624         tar = TarTime(future_cutoff, past_cutoff)
1625
1626         for filename, entry in self.pkg.files.items():
1627             if entry["type"] == "deb":
1628                 tar.reset()
1629                 try:
1630                     deb = apt_inst.DebFile(filename)
1631                     deb.control.go(tar.callback)
1632
1633                     future_files = tar.future_files.keys()
1634                     if future_files:
1635                         num_future_files = len(future_files)
1636                         future_file = future_files[0]
1637                         future_date = tar.future_files[future_file]
1638                         self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1639                                % (filename, num_future_files, future_file, time.ctime(future_date)))
1640
1641                     ancient_files = tar.ancient_files.keys()
1642                     if ancient_files:
1643                         num_ancient_files = len(ancient_files)
1644                         ancient_file = ancient_files[0]
1645                         ancient_date = tar.ancient_files[ancient_file]
1646                         self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1647                                % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1648                 except:
1649                     self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1650
1651     def check_if_upload_is_sponsored(self, uid_email, uid_name):
1652         if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1653             sponsored = False
1654         elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1655             sponsored = False
1656             if uid_name == "":
1657                 sponsored = True
1658         else:
1659             sponsored = True
1660             if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1661                 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1662                 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1663                     self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1664                         self.pkg.changes["sponsoremail"] = uid_email
1665
1666         return sponsored
1667
1668
1669     ###########################################################################
1670     # check_signed_by_key checks
1671     ###########################################################################
1672
1673     def check_signed_by_key(self):
1674         """Ensure the .changes is signed by an authorized uploader."""
1675         session = DBConn().session()
1676
1677         # First of all we check that the person has proper upload permissions
1678         # and that this upload isn't blocked
1679         fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1680
1681         if fpr is None:
1682             self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1683             return
1684
1685         # TODO: Check that import-keyring adds UIDs properly
1686         if not fpr.uid:
1687             self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
1688             return
1689
1690         # Check that the fingerprint which uploaded has permission to do so
1691         self.check_upload_permissions(fpr, session)
1692
1693         # Check that this package is not in a transition
1694         self.check_transition(session)
1695
1696         session.close()
1697
1698
1699     def check_upload_permissions(self, fpr, session):
1700         # Check any one-off upload blocks
1701         self.check_upload_blocks(fpr, session)
1702
1703         # If the source_acl is None, source is never allowed
1704         if fpr.source_acl is None:
1705             if self.pkg.changes["architecture"].has_key("source"):
1706                 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1707                 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1708                 self.rejects.append(rej)
1709                 return
1710         # Do DM as a special case
1711         # DM is a special case unfortunately, so we check it first
1712         # (keys with no source access get more access than DMs in one
1713         #  way; DMs can only upload for their packages whether source
1714         #  or binary, whereas keys with no access might be able to
1715         #  upload some binaries)
1716         elif fpr.source_acl.access_level == 'dm':
1717             self.check_dm_upload(fpr, session)
1718         else:
1719             # If not a DM, we allow full upload rights
1720             uid_email = "%s@debian.org" % (fpr.uid.uid)
1721             self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1722
1723
1724         # Check binary upload permissions
1725         # By this point we know that DMs can't have got here unless they
1726         # are allowed to deal with the package concerned so just apply
1727         # normal checks
1728         if fpr.binary_acl.access_level == 'full':
1729             return
1730
1731         # Otherwise we're in the map case
1732         tmparches = self.pkg.changes["architecture"].copy()
1733         tmparches.pop('source', None)
1734
1735         for bam in fpr.binary_acl_map:
1736             tmparches.pop(bam.architecture.arch_string, None)
1737
1738         if len(tmparches.keys()) > 0:
1739             if fpr.binary_reject:
1740                 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1741                 if len(tmparches.keys()) == 1:
1742                     rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1743                 else:
1744                     rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1745                 self.rejects.append(rej)
1746             else:
1747                 # TODO: This is where we'll implement reject vs throw away binaries later
1748                 rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
1749                 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1750                 rej += "\nFingerprint: %s", (fpr.fingerprint)
1751                 self.rejects.append(rej)
1752
1753
1754     def check_upload_blocks(self, fpr, session):
1755         """Check whether any upload blocks apply to this source, source
1756            version, uid / fpr combination"""
1757
1758         def block_rej_template(fb):
1759             rej = 'Manual upload block in place for package %s' % fb.source
1760             if fb.version is not None:
1761                 rej += ', version %s' % fb.version
1762             return rej
1763
1764         for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1765             # version is None if the block applies to all versions
1766             if fb.version is None or fb.version == self.pkg.changes['version']:
1767                 # Check both fpr and uid - either is enough to cause a reject
1768                 if fb.fpr is not None:
1769                     if fb.fpr.fingerprint == fpr.fingerprint:
1770                         self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1771                 if fb.uid is not None:
1772                     if fb.uid == fpr.uid:
1773                         self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1774
1775
    def check_dm_upload(self, fpr, session):
        """Enforce the Debian Maintainer upload restrictions (2007 GR) for
        keys whose source ACL access level is 'dm'.

        Each failed condition appends a reason to self.rejects."""
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version in unstable/experimental must exist and
        ## must carry DM-Upload-Allowed: yes
        r = get_newest_source(self.pkg.changes["source"], session)

        if r is None:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # uploader includes the maintainer
        accept = False
        for uploader in r.uploaders:
            (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                for s in get_source_by_package_and_suite(b, suite, session):
                    if s.source != self.pkg.changes["source"]:
                        # NOTE(review): 's' is a source object, not a plain
                        # name; the %s relies on its string conversion --
                        # confirm the message renders as intended.
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1832
1833
1834
    def check_transition(self, session):
        """Reject sourceful unstable uploads of packages that are part of an
        ongoing testing transition, as listed in the YAML file named by
        Dinstall::ReleaseTransitions."""
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existant) with
        # checks.
        transpath = cnf.get("Dinstall::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        # NOTE(review): this file handle is never closed; harmless on
        # CPython via refcounting, but worth tidying.
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe than ending up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            # 'compare' is only read when current is not None, thanks to the
            # short-circuit evaluation below.
            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected,t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
1905
1906     ###########################################################################
1907     # End check_signed_by_key checks
1908     ###########################################################################
1909
1910     def build_summaries(self):
1911         """ Build a summary of changes the upload introduces. """
1912
1913         (byhand, new, summary, override_summary) = self.pkg.file_summary()
1914
1915         short_summary = summary
1916
1917         # This is for direport's benefit...
1918         f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1919
1920         if byhand or new:
1921             summary += "Changes: " + f
1922
1923         summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1924
1925         summary += self.announce(short_summary, 0)
1926
1927         return (summary, short_summary)
1928
1929     ###########################################################################
1930
1931     def close_bugs(self, summary, action):
1932         """
1933         Send mail to close bugs as instructed by the closes field in the changes file.
1934         Also add a line to summary if any work was done.
1935
1936         @type summary: string
1937         @param summary: summary text, as given by L{build_summaries}
1938
1939         @type action: bool
1940         @param action: Set to false no real action will be done.
1941
1942         @rtype: string
1943         @return: summary. If action was taken, extended by the list of closed bugs.
1944
1945         """
1946
1947         template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1948
1949         bugs = self.pkg.changes["closes"].keys()
1950
1951         if not bugs:
1952             return summary
1953
1954         bugs.sort()
1955         summary += "Closing bugs: "
1956         for bug in bugs:
1957             summary += "%s " % (bug)
1958             if action:
1959                 self.update_subst()
1960                 self.Subst["__BUG_NUMBER__"] = bug
1961                 if self.pkg.changes["distribution"].has_key("stable"):
1962                     self.Subst["__STABLE_WARNING__"] = """
1963 Note that this package is not part of the released stable Debian
1964 distribution.  It may have dependencies on other unreleased software,
1965 or other instabilities.  Please take care if you wish to install it.
1966 The update will eventually make its way into the next released Debian
1967 distribution."""
1968                 else:
1969                     self.Subst["__STABLE_WARNING__"] = ""
1970                 mail_message = utils.TemplateSubst(self.Subst, template)
1971                 utils.send_mail(mail_message)
1972
1973                 # Clear up after ourselves
1974                 del self.Subst["__BUG_NUMBER__"]
1975                 del self.Subst["__STABLE_WARNING__"]
1976
1977         if action and self.logger:
1978             self.logger.log(["closing bugs"] + bugs)
1979
1980         summary += "\n"
1981
1982         return summary
1983
1984     ###########################################################################
1985
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: Textstring about action taken.

        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            suite = get_suite(dist)
            if suite is None: continue
            announce_list = suite.announce
            # Announce to each list only once, even when several target
            # suites share the same announce list.
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.update_subst()
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                # Bcc the package tracking server, if configured, for
                # sourceful uploads.
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        # Bug closing is driven from here so its result can be folded into
        # the returned summary.
        if cnf.FindB("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
2043
2044     ###########################################################################
2045     @session_wrapper
2046     def accept (self, summary, short_summary, session=None):
2047         """
2048         Accept an upload.
2049
2050         This moves all files referenced from the .changes into the pool,
2051         sends the accepted mail, announces to lists, closes bugs and
2052         also checks for override disparities. If enabled it will write out
2053         the version history for the BTS Version Tracking and will finally call
2054         L{queue_build}.
2055
2056         @type summary: string
2057         @param summary: Summary text
2058
2059         @type short_summary: string
2060         @param short_summary: Short summary
2061         """
2062
2063         cnf = Config()
2064         stats = SummaryStats()
2065
2066         print "Installing."
2067         self.logger.log(["installing changes", self.pkg.changes_file])
2068
2069         binaries = []
2070         poolfiles = []
2071
2072         # Add the .dsc file to the DB first
2073         for newfile, entry in self.pkg.files.items():
2074             if entry["type"] == "dsc":
2075                 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2076                 for j in pfs:
2077                     poolfiles.append(j)
2078
2079         # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2080         for newfile, entry in self.pkg.files.items():
2081             if entry["type"] == "deb":
2082                 b, pf = add_deb_to_db(self, newfile, session)
2083                 binaries.append(b)
2084                 poolfiles.append(pf)
2085
2086         # If this is a sourceful diff only upload that is moving
2087         # cross-component we need to copy the .orig files into the new
2088         # component too for the same reasons as above.
2089         # XXX: mhy: I think this should be in add_dsc_to_db
2090         if self.pkg.changes["architecture"].has_key("source"):
2091             for orig_file in self.pkg.orig_files.keys():
2092                 if not self.pkg.orig_files[orig_file].has_key("id"):
2093                     continue # Skip if it's not in the pool
2094                 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2095                 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2096                     continue # Skip if the location didn't change
2097
2098                 # Do the move
2099                 oldf = get_poolfile_by_id(orig_file_id, session)
2100                 old_filename = os.path.join(oldf.location.path, oldf.filename)
2101                 old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
2102                            'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2103
2104                 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2105
2106                 # TODO: Care about size/md5sum collisions etc
2107                 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2108
2109                 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2110                 if newf is None:
2111                     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2112                     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2113
2114                     session.flush()
2115
2116                     # Don't reference the old file from this changes
2117                     for p in poolfiles:
2118                         if p.file_id == oldf.file_id:
2119                             poolfiles.remove(p)
2120
2121                     poolfiles.append(newf)
2122
2123                     # Fix up the DSC references
2124                     toremove = []
2125
2126                     for df in source.srcfiles:
2127                         if df.poolfile.file_id == oldf.file_id:
2128                             # Add a new DSC entry and mark the old one for deletion
2129                             # Don't do it in the loop so we don't change the thing we're iterating over
2130                             newdscf = DSCFile()
2131                             newdscf.source_id = source.source_id
2132                             newdscf.poolfile_id = newf.file_id
2133                             session.add(newdscf)
2134
2135                             toremove.append(df)
2136
2137                     for df in toremove:
2138                         session.delete(df)
2139
2140                     # Flush our changes
2141                     session.flush()
2142
2143                     # Make sure that our source object is up-to-date
2144                     session.expire(source)
2145
2146         # Add changelog information to the database
2147         self.store_changelog()
2148
2149         # Install the files into the pool
2150         for newfile, entry in self.pkg.files.items():
2151             destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2152             utils.move(newfile, destination)
2153             self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2154             stats.accept_bytes += float(entry["size"])
2155
2156         # Copy the .changes file across for suite which need it.
2157         copy_changes = dict([(x.copychanges, '')
2158                              for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2159                              if x.copychanges is not None])
2160
2161         for dest in copy_changes.keys():
2162             utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2163
2164         # We're done - commit the database changes
2165         session.commit()
2166         # Our SQL session will automatically start a new transaction after
2167         # the last commit
2168
2169         # Now ensure that the metadata has been added
2170         # This has to be done after we copy the files into the pool
2171         # For source if we have it:
2172         if self.pkg.changes["architecture"].has_key("source"):
2173             import_metadata_into_db(source, session)
2174
2175         # Now for any of our binaries
2176         for b in binaries:
2177             import_metadata_into_db(b, session)
2178
2179         session.commit()
2180
2181         # Move the .changes into the 'done' directory
2182         ye, mo, da = time.gmtime()[0:3]
2183         donedir = os.path.join(cnf["Dir::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2184         if not os.path.isdir(donedir):
2185             os.makedirs(donedir)
2186
2187         utils.move(self.pkg.changes_file,
2188                    os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2189
2190         if self.pkg.changes["architecture"].has_key("source"):
2191             UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2192
2193         self.update_subst()
2194         self.Subst["__SUMMARY__"] = summary
2195         mail_message = utils.TemplateSubst(self.Subst,
2196                                            os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2197         utils.send_mail(mail_message)
2198         self.announce(short_summary, 1)
2199
2200         ## Helper stuff for DebBugs Version Tracking
2201         if cnf.Find("Dir::BTSVersionTrack"):
2202             if self.pkg.changes["architecture"].has_key("source"):
2203                 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2204                 version_history = os.fdopen(fd, 'w')
2205                 version_history.write(self.pkg.dsc["bts changelog"])
2206                 version_history.close()
2207                 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2208                                       self.pkg.changes_file[:-8]+".versions")
2209                 os.rename(temp_filename, filename)
2210                 os.chmod(filename, 0644)
2211
2212             # Write out the binary -> source mapping.
2213             (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2214             debinfo = os.fdopen(fd, 'w')
2215             for name, entry in sorted(self.pkg.files.items()):
2216                 if entry["type"] == "deb":
2217                     line = " ".join([entry["package"], entry["version"],
2218                                      entry["architecture"], entry["source package"],
2219                                      entry["source version"]])
2220                     debinfo.write(line+"\n")
2221             debinfo.close()
2222             filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2223                                   self.pkg.changes_file[:-8]+".debinfo")
2224             os.rename(temp_filename, filename)
2225             os.chmod(filename, 0644)
2226
2227         session.commit()
2228
2229         # Set up our copy queues (e.g. buildd queues)
2230         for suite_name in self.pkg.changes["distribution"].keys():
2231             suite = get_suite(suite_name, session)
2232             for q in suite.copy_queues:
2233                 for f in poolfiles:
2234                     q.add_file_from_pool(f)
2235
2236         session.commit()
2237
2238         # Finally...
2239         stats.accept_count += 1
2240
2241     def check_override(self):
2242         """
2243         Checks override entries for validity. Mails "Override disparity" warnings,
2244         if that feature is enabled.
2245
2246         Abandons the check if
2247           - override disparity checks are disabled
2248           - mail sending is disabled
2249         """
2250
2251         cnf = Config()
2252
2253         # Abandon the check if override disparity checks have been disabled
2254         if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2255             return
2256
2257         summary = self.pkg.check_override()
2258
2259         if summary == "":
2260             return
2261
2262         overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2263
2264         self.update_subst()
2265         self.Subst["__SUMMARY__"] = summary
2266         mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2267         utils.send_mail(mail_message)
2268         del self.Subst["__SUMMARY__"]
2269
2270     ###########################################################################
2271
2272     def remove(self, from_dir=None):
2273         """
2274         Used (for instance) in p-u to remove the package from unchecked
2275
2276         Also removes the package from holding area.
2277         """
2278         if from_dir is None:
2279             from_dir = self.pkg.directory
2280         h = Holding()
2281
2282         for f in self.pkg.files.keys():
2283             os.unlink(os.path.join(from_dir, f))
2284             if os.path.exists(os.path.join(h.holding_dir, f)):
2285                 os.unlink(os.path.join(h.holding_dir, f))
2286
2287         os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2288         if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2289             os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2290
2291     ###########################################################################
2292
2293     def move_to_queue (self, queue):
2294         """
2295         Move files to a destination queue using the permissions in the table
2296         """
2297         h = Holding()
2298         utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2299                    queue.path, perms=int(queue.change_perms, 8))
2300         for f in self.pkg.files.keys():
2301             utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2302
2303     ###########################################################################
2304
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If a file already exists in the reject
        directory, a numbered alternative name is chosen for the new
        file instead (via utils.find_next_free).

        @type reject_files: dict
        @param reject_files: file dictionary

        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Reject"], file_entry)

            try:
                # O_EXCL claims the destination atomically: if the file
                # already exists this raises EEXIST instead of clobbering it.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
2352
2353     ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        Moves the upload's files into the reject directory, writes a
        C{<changes>.reason} file there, and sends the rejection mail.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @param notes: sequence of note objects (author/version/notedate/comment
            attributes) used to pre-fill the editor buffer; the default of ""
            simply yields an empty buffer

        @return: 0 on rejection; 1 if the operator abandoned the manual
            rejection in the interactive prompt

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            # Loop until the operator stops choosing 'E'dit.
            while answer == 'E':
                # NOTE(review): editor/filename are interpolated into a shell
                # command; both are operator-controlled here, but be wary.
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message,"  ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                # An empty reply falls back to the default answer (the
                # bracketed letter in the prompt, i.e. 'R').
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # <name>.changes -> <name>.reason in the reject directory.
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            # Automated rejection: the .reason file gets the raw message,
            # and the mail is marked as automatic.
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        stats = SummaryStats()
        stats.reject_count += 1
        return 0
2453
2454     ################################################################################
2455     def in_override_p(self, package, component, suite, binary_type, filename, session):
2456         """
2457         Check if a package already has override entries in the DB
2458
2459         @type package: string
2460         @param package: package name
2461
2462         @type component: string
2463         @param component: database id of the component
2464
2465         @type suite: int
2466         @param suite: database id of the suite
2467
2468         @type binary_type: string
2469         @param binary_type: type of the package
2470
2471         @type filename: string
2472         @param filename: filename we check
2473
2474         @return: the database result. But noone cares anyway.
2475
2476         """
2477
2478         cnf = Config()
2479
2480         if binary_type == "": # must be source
2481             file_type = "dsc"
2482         else:
2483             file_type = binary_type
2484
2485         # Override suite name; used for example with proposed-updates
2486         oldsuite = get_suite(suite, session)
2487         if (not oldsuite is None) and oldsuite.overridesuite:
2488             suite = oldsuite.overridesuite
2489
2490         result = get_override(package, suite, component, file_type, session)
2491
2492         # If checking for a source package fall back on the binary override type
2493         if file_type == "dsc" and len(result) < 1:
2494             result = get_override(package, suite, component, ['deb', 'udeb'], session)
2495
2496         # Remember the section and priority so we can check them later if appropriate
2497         if len(result) > 0:
2498             result = result[0]
2499             self.pkg.files[filename]["override section"] = result.section.section
2500             self.pkg.files[filename]["override priority"] = result.priority.priority
2501             return result
2502
2503         return None
2504
2505     ################################################################################
2506     def get_anyversion(self, sv_list, suite):
2507         """
2508         @type sv_list: list
2509         @param sv_list: list of (suite, version) tuples to check
2510
2511         @type suite: string
2512         @param suite: suite name
2513
2514         Description: TODO
2515         """
2516         Cnf = Config()
2517         anyversion = None
2518         anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2519         for (s, v) in sv_list:
2520             if s in [ x.lower() for x in anysuite ]:
2521                 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2522                     anyversion = v
2523
2524         return anyversion
2525
2526     ################################################################################
2527
2528     def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2529         """
2530         @type sv_list: list
2531         @param sv_list: list of (suite, version) tuples to check
2532
2533         @type filename: string
2534         @param filename: XXX
2535
2536         @type new_version: string
2537         @param new_version: XXX
2538
2539         Ensure versions are newer than existing packages in target
2540         suites and that cross-suite version checking rules as
2541         set out in the conf file are satisfied.
2542         """
2543
2544         cnf = Config()
2545
2546         # Check versions for each target suite
2547         for target_suite in self.pkg.changes["distribution"].keys():
2548             # Check we can find the target suite
2549             ts = get_suite(target_suite)
2550             if ts is None:
2551                 self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
2552                 continue
2553
2554             must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2555             must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2556
2557             # Enforce "must be newer than target suite" even if conffile omits it
2558             if target_suite not in must_be_newer_than:
2559                 must_be_newer_than.append(target_suite)
2560
2561             for (suite, existent_version) in sv_list:
2562                 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2563
2564                 if suite in must_be_newer_than and sourceful and vercmp < 1:
2565                     self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2566
2567                 if suite in must_be_older_than and vercmp > -1:
2568                     cansave = 0
2569
2570                     if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2571                         # we really use the other suite, ignoring the conflicting one ...
2572                         addsuite = self.pkg.changes["distribution-version"][suite]
2573
2574                         add_version = self.get_anyversion(sv_list, addsuite)
2575                         target_version = self.get_anyversion(sv_list, target_suite)
2576
2577                         if not add_version:
2578                             # not add_version can only happen if we map to a suite
2579                             # that doesn't enhance the suite we're propup'ing from.
2580                             # so "propup-ver x a b c; map a d" is a problem only if
2581                             # d doesn't enhance a.
2582                             #
2583                             # i think we could always propagate in this case, rather
2584                             # than complaining. either way, this isn't a REJECT issue
2585                             #
2586                             # And - we really should complain to the dorks who configured dak
2587                             self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2588                             self.pkg.changes.setdefault("propdistribution", {})
2589                             self.pkg.changes["propdistribution"][addsuite] = 1
2590                             cansave = 1
2591                         elif not target_version:
2592                             # not targets_version is true when the package is NEW
2593                             # we could just stick with the "...old version..." REJECT
2594                             # for this, I think.
2595                             self.rejects.append("Won't propogate NEW packages.")
2596                         elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2597                             # propogation would be redundant. no need to reject though.
2598                             self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2599                             cansave = 1
2600                         elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2601                              apt_pkg.VersionCompare(add_version, target_version) >= 0:
2602                             # propogate!!
2603                             self.warnings.append("Propogating upload to %s" % (addsuite))
2604                             self.pkg.changes.setdefault("propdistribution", {})
2605                             self.pkg.changes["propdistribution"][addsuite] = 1
2606                             cansave = 1
2607
2608                     if not cansave:
2609                         self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2610
2611     ################################################################################
2612     def check_binary_against_db(self, filename, session):
2613         # Ensure version is sane
2614         self.cross_suite_version_check( \
2615             get_suite_version_by_package(self.pkg.files[filename]["package"], \
2616                 self.pkg.files[filename]["architecture"], session),
2617             filename, self.pkg.files[filename]["version"], sourceful=False)
2618
2619         # Check for any existing copies of the file
2620         q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2621         q = q.filter_by(version=self.pkg.files[filename]["version"])
2622         q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2623
2624         if q.count() > 0:
2625             self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2626
2627     ################################################################################
2628
2629     def check_source_against_db(self, filename, session):
2630         source = self.pkg.dsc.get("source")
2631         version = self.pkg.dsc.get("version")
2632
2633         # Ensure version is sane
2634         self.cross_suite_version_check( \
2635             get_suite_version_by_source(source, session), filename, version,
2636             sourceful=True)
2637
2638     ################################################################################
2639     def check_dsc_against_db(self, filename, session):
2640         """
2641
2642         @warning: NB: this function can remove entries from the 'files' index [if
2643          the orig tarball is a duplicate of the one in the archive]; if
2644          you're iterating over 'files' and call this function as part of
2645          the loop, be sure to add a check to the top of the loop to
2646          ensure you haven't just tried to dereference the deleted entry.
2647
2648         """
2649
2650         Cnf = Config()
2651         self.pkg.orig_files = {} # XXX: do we need to clear it?
2652         orig_files = self.pkg.orig_files
2653
2654         # Try and find all files mentioned in the .dsc.  This has
2655         # to work harder to cope with the multiple possible
2656         # locations of an .orig.tar.gz.
2657         # The ordering on the select is needed to pick the newest orig
2658         # when it exists in multiple places.
2659         for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2660             found = None
2661             if self.pkg.files.has_key(dsc_name):
2662                 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2663                 actual_size = int(self.pkg.files[dsc_name]["size"])
2664                 found = "%s in incoming" % (dsc_name)
2665
2666                 # Check the file does not already exist in the archive
2667                 ql = get_poolfile_like_name(dsc_name, session)
2668
2669                 # Strip out anything that isn't '%s' or '/%s$'
2670                 for i in ql:
2671                     if not i.filename.endswith(dsc_name):
2672                         ql.remove(i)
2673
2674                 # "[dak] has not broken them.  [dak] has fixed a
2675                 # brokenness.  Your crappy hack exploited a bug in
2676                 # the old dinstall.
2677                 #
2678                 # "(Come on!  I thought it was always obvious that
2679                 # one just doesn't release different files with
2680                 # the same name and version.)"
2681                 #                        -- ajk@ on d-devel@l.d.o
2682
2683                 if len(ql) > 0:
2684                     # Ignore exact matches for .orig.tar.gz
2685                     match = 0
2686                     if re_is_orig_source.match(dsc_name):
2687                         for i in ql:
2688                             if self.pkg.files.has_key(dsc_name) and \
2689                                int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2690                                self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2691                                 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2692                                 # TODO: Don't delete the entry, just mark it as not needed
2693                                 # This would fix the stupidity of changing something we often iterate over
2694                                 # whilst we're doing it
2695                                 del self.pkg.files[dsc_name]
2696                                 dsc_entry["files id"] = i.file_id
2697                                 if not orig_files.has_key(dsc_name):
2698                                     orig_files[dsc_name] = {}
2699                                 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2700                                 match = 1
2701
2702                                 # Don't bitch that we couldn't find this file later
2703                                 try:
2704                                     self.later_check_files.remove(dsc_name)
2705                                 except ValueError:
2706                                     pass
2707
2708
2709                     if not match:
2710                         self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2711
2712             elif re_is_orig_source.match(dsc_name):
2713                 # Check in the pool
2714                 ql = get_poolfile_like_name(dsc_name, session)
2715
2716                 # Strip out anything that isn't '%s' or '/%s$'
2717                 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2718                 for i in ql:
2719                     if not i.filename.endswith(dsc_name):
2720                         ql.remove(i)
2721
2722                 if len(ql) > 0:
2723                     # Unfortunately, we may get more than one match here if,
2724                     # for example, the package was in potato but had an -sa
2725                     # upload in woody.  So we need to choose the right one.
2726
2727                     # default to something sane in case we don't match any or have only one
2728                     x = ql[0]
2729
2730                     if len(ql) > 1:
2731                         for i in ql:
2732                             old_file = os.path.join(i.location.path, i.filename)
2733                             old_file_fh = utils.open_file(old_file)
2734                             actual_md5 = apt_pkg.md5sum(old_file_fh)
2735                             old_file_fh.close()
2736                             actual_size = os.stat(old_file)[stat.ST_SIZE]
2737                             if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2738                                 x = i
2739
2740                     old_file = os.path.join(i.location.path, i.filename)
2741                     old_file_fh = utils.open_file(old_file)
2742                     actual_md5 = apt_pkg.md5sum(old_file_fh)
2743                     old_file_fh.close()
2744                     actual_size = os.stat(old_file)[stat.ST_SIZE]
2745                     found = old_file
2746                     suite_type = x.location.archive_type
2747                     # need this for updating dsc_files in install()
2748                     dsc_entry["files id"] = x.file_id
2749                     # See install() in process-accepted...
2750                     if not orig_files.has_key(dsc_name):
2751                         orig_files[dsc_name] = {}
2752                     orig_files[dsc_name]["id"] = x.file_id
2753                     orig_files[dsc_name]["path"] = old_file
2754                     orig_files[dsc_name]["location"] = x.location.location_id
2755                 else:
2756                     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2757                     # Not there? Check the queue directories...
2758                     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2759                         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2760                             continue
2761                         in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2762                         if os.path.exists(in_otherdir):
2763                             in_otherdir_fh = utils.open_file(in_otherdir)
2764                             actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2765                             in_otherdir_fh.close()
2766                             actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2767                             found = in_otherdir
2768                             if not orig_files.has_key(dsc_name):
2769                                 orig_files[dsc_name] = {}
2770                             orig_files[dsc_name]["path"] = in_otherdir
2771
2772                     if not found:
2773                         self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2774                         continue
2775             else:
2776                 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2777                 continue
2778             if actual_md5 != dsc_entry["md5sum"]:
2779                 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2780             if actual_size != int(dsc_entry["size"]):
2781                 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2782
2783     ################################################################################
2784     # This is used by process-new and process-holding to recheck a changes file
2785     # at the time we're running.  It mainly wraps various other internal functions
2786     # and is similar to accepted_checks - these should probably be tidied up
2787     # and combined
2788     def recheck(self, session):
2789         cnf = Config()
2790         for f in self.pkg.files.keys():
2791             # The .orig.tar.gz can disappear out from under us is it's a
2792             # duplicate of one in the archive.
2793             if not self.pkg.files.has_key(f):
2794                 continue
2795
2796             entry = self.pkg.files[f]
2797
2798             # Check that the source still exists
2799             if entry["type"] == "deb":
2800                 source_version = entry["source version"]
2801                 source_package = entry["source package"]
2802                 if not self.pkg.changes["architecture"].has_key("source") \
2803                    and not source_exists(source_package, source_version, \
2804                     suites = self.pkg.changes["distribution"].keys(), session = session):
2805                     source_epochless_version = re_no_epoch.sub('', source_version)
2806                     dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2807                     found = False
2808                     for q in ["Embargoed", "Unembargoed", "Newstage"]:
2809                         if cnf.has_key("Dir::Queue::%s" % (q)):
2810                             if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2811                                 found = True
2812                     if not found:
2813                         self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2814
2815             # Version and file overwrite checks
2816             if entry["type"] == "deb":
2817                 self.check_binary_against_db(f, session)
2818             elif entry["type"] == "dsc":
2819                 self.check_source_against_db(f, session)
2820                 self.check_dsc_against_db(f, session)
2821
2822     ################################################################################
2823     def accepted_checks(self, overwrite_checks, session):
2824         # Recheck anything that relies on the database; since that's not
2825         # frozen between accept and our run time when called from p-a.
2826
2827         # overwrite_checks is set to False when installing to stable/oldstable
2828
2829         propogate={}
2830         nopropogate={}
2831
2832         # Find the .dsc (again)
2833         dsc_filename = None
2834         for f in self.pkg.files.keys():
2835             if self.pkg.files[f]["type"] == "dsc":
2836                 dsc_filename = f
2837
2838         for checkfile in self.pkg.files.keys():
2839             # The .orig.tar.gz can disappear out from under us is it's a
2840             # duplicate of one in the archive.
2841             if not self.pkg.files.has_key(checkfile):
2842                 continue
2843
2844             entry = self.pkg.files[checkfile]
2845
2846             # Check that the source still exists
2847             if entry["type"] == "deb":
2848                 source_version = entry["source version"]
2849                 source_package = entry["source package"]
2850                 if not self.pkg.changes["architecture"].has_key("source") \
2851                    and not source_exists(source_package, source_version, \
2852                     suites = self.pkg.changes["distribution"].keys(), \
2853                     session = session):
2854                     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2855
2856             # Version and file overwrite checks
2857             if overwrite_checks:
2858                 if entry["type"] == "deb":
2859                     self.check_binary_against_db(checkfile, session)
2860                 elif entry["type"] == "dsc":
2861                     self.check_source_against_db(checkfile, session)
2862                     self.check_dsc_against_db(dsc_filename, session)
2863
2864             # propogate in the case it is in the override tables:
2865             for suite in self.pkg.changes.get("propdistribution", {}).keys():
2866                 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2867                     propogate[suite] = 1
2868                 else:
2869                     nopropogate[suite] = 1
2870
2871         for suite in propogate.keys():
2872             if suite in nopropogate:
2873                 continue
2874             self.pkg.changes["distribution"][suite] = 1
2875
2876         for checkfile in self.pkg.files.keys():
2877             # Check the package is still in the override tables
2878             for suite in self.pkg.changes["distribution"].keys():
2879                 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2880                     self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2881
2882     ################################################################################
2883     # If any file of an upload has a recent mtime then chances are good
2884     # the file is still being uploaded.
2885
2886     def upload_too_new(self):
2887         cnf = Config()
2888         too_new = False
2889         # Move back to the original directory to get accurate time stamps
2890         cwd = os.getcwd()
2891         os.chdir(self.pkg.directory)
2892         file_list = self.pkg.files.keys()
2893         file_list.extend(self.pkg.dsc_files.keys())
2894         file_list.append(self.pkg.changes_file)
2895         for f in file_list:
2896             try:
2897                 last_modified = time.time()-os.path.getmtime(f)
2898                 if last_modified < int(cnf["Dinstall::SkipTime"]):
2899                     too_new = True
2900                     break
2901             except:
2902                 pass
2903
2904         os.chdir(cwd)
2905         return too_new
2906
    def store_changelog(self):
        """
        Store the changelog text of this upload ("Changes" field of the
        .changes file) in the database, unless an entry for the same
        (source, version, architecture) already exists.

        Binary-only uploads are skipped unless the version looks like a
        binNMU (matches re_bin_only_nmu).  Commits the session it opens.
        """

        # Skip binary-only upload if it is not a bin-NMU
        if not self.pkg.changes['architecture'].has_key('source'):
            from daklib.regexes import re_bin_only_nmu
            if not re_bin_only_nmu.search(self.pkg.changes['version']):
                return

        session = DBConn().session()

        # Check if upload already has a changelog entry
        # (changelog_id != 0 means a real changelogs_text row is linked).
        query = """SELECT changelog_id FROM changes WHERE source = :source
                   AND version = :version AND architecture = :architecture AND changelog_id != 0"""
        if session.execute(query, {'source': self.pkg.changes['source'], \
                                   'version': self.pkg.changes['version'], \
                                   'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
            # Already stored; release the transaction and bail out.
            session.commit()
            return

        # Add current changelog text into changelogs_text table, return created ID
        query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
        ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]

        # Link ID to the upload available in changes table
        query = """UPDATE changes SET changelog_id = :id WHERE source = :source
                   AND version = :version AND architecture = :architecture"""
        session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
                                'version': self.pkg.changes['version'], \
                                'architecture': " ".join(self.pkg.changes['architecture'].keys())})

        session.commit()