5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
82 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
84 # Validate the override type
85 type_id = get_override_type(file_type, session)
87 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
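# Example (hypothetical file entry; values purely illustrative):
#
#   f = {"type": "dsc", "priority": "source", "section": "devel"}
#   file_type = get_type(f, session)    # -> "dsc"
#
# Binary entries carry a "dbtype" key instead (e.g. {"dbtype": "deb", ...}),
# in which case that value is returned as-is.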
91 ################################################################################
93 # Determine what parts in a .changes are NEW
95 def determine_new(changes, files, warn=1, session=None):
97 Determine what parts in a C{changes} file are NEW.
99 @type changes: Upload.Pkg.changes dict
100 @param changes: Changes dictionary
102 @type files: Upload.Pkg.files dict
103 @param files: Files dictionary
106 @param warn: Warn if overrides are added for (old)stable
109 @return: dictionary of NEW components.
114 # Build up a list of potentially new things
115 for name, f in files.items():
116 # Skip byhand elements
117 # if f["type"] == "byhand":
120 priority = f["priority"]
121 section = f["section"]
122 file_type = get_type(f, session)
123 component = f["component"]
125 if file_type == "dsc":
128 if not new.has_key(pkg):
130 new[pkg]["priority"] = priority
131 new[pkg]["section"] = section
132 new[pkg]["type"] = file_type
133 new[pkg]["component"] = component
134 new[pkg]["files"] = []
136 old_type = new[pkg]["type"]
137 if old_type != file_type:
138 # source gets trumped by deb or udeb
139 if old_type == "dsc":
140 new[pkg]["priority"] = priority
141 new[pkg]["section"] = section
142 new[pkg]["type"] = file_type
143 new[pkg]["component"] = component
145 new[pkg]["files"].append(name)
147 if f.has_key("othercomponents"):
148 new[pkg]["othercomponents"] = f["othercomponents"]
150 # Fix up the list of target suites
152 for suite in changes["suite"].keys():
153 override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
155 (olderr, newerr) = (get_suite(suite, session) == None,
156 get_suite(override, session) == None)
158 (oinv, ninv) = ("", "")
159 if olderr: oinv = "invalid "
160 if newerr: ninv = "invalid "
161 print "warning: overriding %ssuite %s to %ssuite %s" % (
162 oinv, suite, ninv, override)
163 del changes["suite"][suite]
164 changes["suite"][override] = 1
166 for suite in changes["suite"].keys():
167 for pkg in new.keys():
168 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
170 for file_entry in new[pkg]["files"]:
171 if files[file_entry].has_key("new"):
172 del files[file_entry]["new"]
176 for s in ['stable', 'oldstable']:
177 if changes["suite"].has_key(s):
178 print "WARNING: overrides will be added for %s!" % s
179 for pkg in new.keys():
180 if new[pkg].has_key("othercomponents"):
181 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
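# Shape of the dictionary this function builds up (values purely illustrative):
#
#   new = {
#       "foopkg": {
#           "priority":  "optional",
#           "section":   "utils",
#           "type":      "deb",
#           "component": "main",
#           "files":     ["foopkg_1.0-1_amd64.deb"],
#       },
#   }
#
# Packages for which an override already exists in a target suite are dropped
# from the dictionary again before it is returned.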
185 ################################################################################
187 def check_valid(new, session=None):
189 Check if section and priority for NEW packages exist in database.
190 Additionally does sanity checks:
191 - debian-installer packages have to be udeb (or source)
192 - non-debian-installer packages cannot be udeb
193 - source priority can only be assigned to dsc file types
196 @param new: Dict of new packages with their section, priority and type.
199 for pkg in new.keys():
200 section_name = new[pkg]["section"]
201 priority_name = new[pkg]["priority"]
202 file_type = new[pkg]["type"]
204 section = get_section(section_name, session)
206 new[pkg]["section id"] = -1
208 new[pkg]["section id"] = section.section_id
210 priority = get_priority(priority_name, session)
212 new[pkg]["priority id"] = -1
214 new[pkg]["priority id"] = priority.priority_id
217 di = section_name.find("debian-installer") != -1
219 # If d-i, we must be udeb and vice-versa
220 if (di and file_type not in ("udeb", "dsc")) or \
221 (not di and file_type == "udeb"):
222 new[pkg]["section id"] = -1
224 # If dsc we need to be source and vice-versa
225 if (priority_name == "source" and file_type != "dsc") or \
226 (priority_name != "source" and file_type == "dsc"):
227 new[pkg]["priority id"] = -1
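# Illustrative effect on a single entry (ids made up):
#
#   before: {"section": "utils", "priority": "optional", "type": "deb"}
#   after:  {"section": "utils", "priority": "optional", "type": "deb",
#            "section id": 42, "priority id": 7}
#
# An unknown name, or a violation of the sanity checks above, leaves the
# corresponding id at -1.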
229 ###############################################################################
231 # Used by Upload.check_timestamps
232 class TarTime(object):
233 def __init__(self, future_cutoff, past_cutoff):
235 self.future_cutoff = future_cutoff
236 self.past_cutoff = past_cutoff
239 self.future_files = {}
240 self.ancient_files = {}
242 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
243 if MTime > self.future_cutoff:
244 self.future_files[Name] = MTime
245 if MTime < self.past_cutoff:
246 self.ancient_files[Name] = MTime
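# Typical use, mirroring check_timestamps() further down (cut-off values and
# the file name are examples only):
#
#   tar = TarTime(future_cutoff=time.time() + 24 * 60 * 60,
#                 past_cutoff=time.mktime(time.strptime("1975", "%Y")))
#   deb_file = utils.open_file("example_1.0-1_amd64.deb")
#   apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#   deb_file.seek(0)
#   apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
#   deb_file.close()
#   # tar.future_files / tar.ancient_files now map member names to bad mtimes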
248 ###############################################################################
250 class Upload(object):
252 Everything that has to do with processing an upload.
260 ###########################################################################
263 """ Reset a number of internal variables."""
265 # Initialize the substitution template map
268 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
269 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
270 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
271 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
277 self.later_check_files = []
281 def package_info(self):
283 Format various messages from this Upload to send to the maintainer.
287 ('Reject Reasons', self.rejects),
288 ('Warnings', self.warnings),
289 ('Notes', self.notes),
293 for title, messages in msgs:
295 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
300 ###########################################################################
301 def update_subst(self):
302 """ Set up the per-package template substitution mappings """
306 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
307 if not self.pkg.changes.has_key("architecture") or not \
308 isinstance(self.pkg.changes["architecture"], dict):
309 self.pkg.changes["architecture"] = { "Unknown" : "" }
311 # and maintainer2047 may not exist.
312 if not self.pkg.changes.has_key("maintainer2047"):
313 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
315 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
316 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
317 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
319 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
320 if self.pkg.changes["architecture"].has_key("source") and \
321 self.pkg.changes["changedby822"] != "" and \
322 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
324 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
325 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
326 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
328 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
329 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
330 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
332 # Process policy doesn't set the fingerprint field and I don't want to make it
333 # do it for now as I don't want to have to deal with the case where we accepted
334 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
335 # the meantime so the package will be remarked as rejectable. Urgh.
336 # TODO: Fix this properly
337 if self.pkg.changes.has_key('fingerprint'):
338 session = DBConn().session()
339 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
340 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
341 if self.pkg.changes.has_key("sponsoremail"):
342 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
345 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
346 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
348 # Apply any global override of the Maintainer field
349 if cnf.get("Dinstall::OverrideMaintainer"):
350 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
351 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
353 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
354 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
355 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
356 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
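# The finished map is fed to utils.TemplateSubst() when mails are generated,
# roughly like this (sketch; the template name is just an example):
#
#   template = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
#   mail_message = utils.TemplateSubst(self.Subst, template)
#   utils.send_mail(mail_message)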
358 ###########################################################################
359 def load_changes(self, filename):
361 Load a changes file and set up a dictionary around it. Also checks for mandatory
364 @type filename: string
365 @param filename: Changes filename, full path.
368 @return: whether the changes file was valid or not. We may want to
369 reject even if this is True (see what gets put in self.rejects).
370 This is simply to prevent us even trying things later which will
371 fail because we couldn't properly parse the file.
374 self.pkg.changes_file = filename
376 # Parse the .changes file into a dictionary
378 self.pkg.changes.update(parse_changes(filename))
379 except CantOpenError:
380 self.rejects.append("%s: can't read file." % (filename))
382 except ParseChangesError, line:
383 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
385 except ChangesUnicodeError:
386 self.rejects.append("%s: changes file not proper utf-8" % (filename))
389 # Parse the Files field from the .changes into another dictionary
391 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
392 except ParseChangesError, line:
393 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
395 except UnknownFormatError, format:
396 self.rejects.append("%s: unknown format '%s'." % (filename, format))
399 # Check for mandatory fields
400 for i in ("distribution", "source", "binary", "architecture",
401 "version", "maintainer", "files", "changes", "description"):
402 if not self.pkg.changes.has_key(i):
403 # Avoid undefined errors later
404 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
407 # Strip a source version in brackets from the source field
408 if re_strip_srcver.search(self.pkg.changes["source"]):
409 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
411 # Ensure the source field is a valid package name.
412 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
413 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
415 # Split multi-value fields into a lower-level dictionary
416 for i in ("architecture", "distribution", "binary", "closes"):
417 o = self.pkg.changes.get(i, "")
419 del self.pkg.changes[i]
421 self.pkg.changes[i] = {}
424 self.pkg.changes[i][j] = 1
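# e.g. (illustrative) "Architecture: source amd64 i386" ends up as
#   self.pkg.changes["architecture"] == {"source": 1, "amd64": 1, "i386": 1}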
426 # Fix the Maintainer: field to be RFC822/2047 compatible
428 (self.pkg.changes["maintainer822"],
429 self.pkg.changes["maintainer2047"],
430 self.pkg.changes["maintainername"],
431 self.pkg.changes["maintaineremail"]) = \
432 fix_maintainer (self.pkg.changes["maintainer"])
433 except ParseMaintError, msg:
434 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
435 % (filename, self.pkg.changes["maintainer"], msg))
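# fix_maintainer() splits the field four ways, roughly (illustrative input):
#
#   fix_maintainer("Jane Random Hacker <jrh@example.org>")
#     -> ("Jane Random Hacker <jrh@example.org>",   # RFC822 form
#         "Jane Random Hacker <jrh@example.org>",   # RFC2047-encoded form
#         "Jane Random Hacker",                     # name
#         "jrh@example.org")                        # email address
#
# (the first two only differ when the name needs encoding or quoting)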
437 # ...likewise for the Changed-By: field if it exists.
439 (self.pkg.changes["changedby822"],
440 self.pkg.changes["changedby2047"],
441 self.pkg.changes["changedbyname"],
442 self.pkg.changes["changedbyemail"]) = \
443 fix_maintainer (self.pkg.changes.get("changed-by", ""))
444 except ParseMaintError, msg:
445 self.pkg.changes["changedby822"] = ""
446 self.pkg.changes["changedby2047"] = ""
447 self.pkg.changes["changedbyname"] = ""
448 self.pkg.changes["changedbyemail"] = ""
450 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
451 % (filename, self.pkg.changes["changed-by"], msg))
453 # Ensure all the values in Closes: are numbers
454 if self.pkg.changes.has_key("closes"):
455 for i in self.pkg.changes["closes"].keys():
456 if re_isanum.match(i) is None:
457 self.rejects.append("%s: `%s' from Closes field isn't a number." % (filename, i))
459 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
460 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
461 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
463 # Check the .changes is non-empty
464 if not self.pkg.files:
465 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
468 # Changes was syntactically valid even if we'll reject
471 ###########################################################################
473 def check_distributions(self):
474 "Check and map the Distribution field"
478 # Handle suite mappings
479 for m in Cnf.ValueList("SuiteMappings"):
482 if mtype == "map" or mtype == "silent-map":
483 (source, dest) = args[1:3]
484 if self.pkg.changes["distribution"].has_key(source):
485 del self.pkg.changes["distribution"][source]
486 self.pkg.changes["distribution"][dest] = 1
487 if mtype != "silent-map":
488 self.notes.append("Mapping %s to %s." % (source, dest))
489 if self.pkg.changes.has_key("distribution-version"):
490 if self.pkg.changes["distribution-version"].has_key(source):
491 self.pkg.changes["distribution-version"][source]=dest
492 elif mtype == "map-unreleased":
493 (source, dest) = args[1:3]
494 if self.pkg.changes["distribution"].has_key(source):
495 for arch in self.pkg.changes["architecture"].keys():
496 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
497 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
498 del self.pkg.changes["distribution"][source]
499 self.pkg.changes["distribution"][dest] = 1
501 elif mtype == "ignore":
503 if self.pkg.changes["distribution"].has_key(suite):
504 del self.pkg.changes["distribution"][suite]
505 self.warnings.append("Ignoring %s as a target suite." % (suite))
506 elif mtype == "reject":
508 if self.pkg.changes["distribution"].has_key(suite):
509 self.rejects.append("Uploads to %s are not accepted." % (suite))
510 elif mtype == "propup-version":
511 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
513 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
514 if self.pkg.changes["distribution"].has_key(args[1]):
515 self.pkg.changes.setdefault("distribution-version", {})
516 for suite in args[2:]:
517 self.pkg.changes["distribution-version"][suite] = suite
519 # Ensure there is (still) a target distribution
520 if len(self.pkg.changes["distribution"].keys()) < 1:
521 self.rejects.append("No valid distribution remaining.")
523 # Ensure target distributions exist
524 for suite in self.pkg.changes["distribution"].keys():
525 if not Cnf.has_key("Suite::%s" % (suite)):
526 self.rejects.append("Unknown distribution `%s'." % (suite))
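# SuiteMappings entries in dak.conf take the forms handled above, for example
# (entries invented for illustration; the live configuration differs):
#
#   SuiteMappings
#   {
#     "map stable proposed-updates";
#     "map-unreleased stable unstable";
#     "ignore testing";
#     "reject futurama";
#     "propup-version testing-security testing testing-proposed-updates";
#   };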
528 ###########################################################################
530 def binary_file_checks(self, f, session):
532 entry = self.pkg.files[f]
534 # Extract package control information
535 deb_file = utils.open_file(f)
537 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
539 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
541 # Can't continue, none of the checks on control would work.
544 # Check for mandatory "Description:"
547 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
549 self.rejects.append("%s: Missing Description in binary package" % (f))
554 # Check for mandatory fields
555 for field in [ "Package", "Architecture", "Version" ]:
556 if control.Find(field) == None:
558 self.rejects.append("%s: No %s field in control." % (f, field))
561 # Ensure the package name matches the one given in the .changes
562 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
563 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
565 # Validate the package field
566 package = control.Find("Package")
567 if not re_valid_pkg_name.match(package):
568 self.rejects.append("%s: invalid package name '%s'." % (f, package))
570 # Validate the version field
571 version = control.Find("Version")
572 if not re_valid_version.match(version):
573 self.rejects.append("%s: invalid version number '%s'." % (f, version))
575 # Ensure the architecture of the .deb is one we know about.
576 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
577 architecture = control.Find("Architecture")
578 upload_suite = self.pkg.changes["distribution"].keys()[0]
580 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
581 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
582 self.rejects.append("Unknown architecture '%s'." % (architecture))
584 # Ensure the architecture of the .deb is one of the ones
585 # listed in the .changes.
586 if not self.pkg.changes["architecture"].has_key(architecture):
587 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
589 # Sanity-check the Depends field
590 depends = control.Find("Depends")
592 self.rejects.append("%s: Depends field is empty." % (f))
594 # Sanity-check the Provides field
595 provides = control.Find("Provides")
597 provide = re_spacestrip.sub('', provides)
599 self.rejects.append("%s: Provides field is empty." % (f))
600 prov_list = provide.split(",")
601 for prov in prov_list:
602 if not re_valid_pkg_name.match(prov):
603 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
605 # Check the section & priority match those given in the .changes (non-fatal)
606 if control.Find("Section") and entry["section"] != "" \
607 and entry["section"] != control.Find("Section"):
608 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
609 (f, control.Find("Section", ""), entry["section"]))
610 if control.Find("Priority") and entry["priority"] != "" \
611 and entry["priority"] != control.Find("Priority"):
612 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
613 (f, control.Find("Priority", ""), entry["priority"]))
615 entry["package"] = package
616 entry["architecture"] = architecture
617 entry["version"] = version
618 entry["maintainer"] = control.Find("Maintainer", "")
620 if f.endswith(".udeb"):
621 self.pkg.files[f]["dbtype"] = "udeb"
622 elif f.endswith(".deb"):
623 self.pkg.files[f]["dbtype"] = "deb"
625 self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
627 entry["source"] = control.Find("Source", entry["package"])
629 # Get the source version
630 source = entry["source"]
633 if source.find("(") != -1:
634 m = re_extract_src_version.match(source)
636 source_version = m.group(2)
638 if not source_version:
639 source_version = self.pkg.files[f]["version"]
641 entry["source package"] = source
642 entry["source version"] = source_version
644 # Ensure the filename matches the contents of the .deb
645 m = re_isadeb.match(f)
648 file_package = m.group(1)
649 if entry["package"] != file_package:
650 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
651 (f, file_package, entry["dbtype"], entry["package"]))
652 epochless_version = re_no_epoch.sub('', control.Find("Version"))
655 file_version = m.group(2)
656 if epochless_version != file_version:
657 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
658 (f, file_version, entry["dbtype"], epochless_version))
661 file_architecture = m.group(3)
662 if entry["architecture"] != file_architecture:
663 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
664 (f, file_architecture, entry["dbtype"], entry["architecture"]))
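# re_isadeb splits "package_version_architecture.(u)deb" file names; e.g.
# (illustrative) "dak_1.0-1_amd64.deb" yields the groups
# ("dak", "1.0-1", "amd64"), each of which must match the control file above.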
666 # Check for existing source
667 source_version = entry["source version"]
668 source_package = entry["source package"]
669 if self.pkg.changes["architecture"].has_key("source"):
670 if source_version != self.pkg.changes["version"]:
671 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
672 (source_version, f, self.pkg.changes["version"]))
674 # Check in the SQL database
675 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
676 # Check in one of the other directories
677 source_epochless_version = re_no_epoch.sub('', source_version)
678 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
679 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
681 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
684 dsc_file_exists = False
685 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
686 if cnf.has_key("Dir::Queue::%s" % (myq)):
687 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
688 dsc_file_exists = True
691 if not dsc_file_exists:
692 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
694 # Check the version and for file overwrites
695 self.check_binary_against_db(f, session)
697 # Temporarily disable contents generation until we change the table storage layout
700 #if len(b.rejects) > 0:
701 # for j in b.rejects:
702 # self.rejects.append(j)
704 def source_file_checks(self, f, session):
705 entry = self.pkg.files[f]
707 m = re_issource.match(f)
711 entry["package"] = m.group(1)
712 entry["version"] = m.group(2)
713 entry["type"] = m.group(3)
715 # Ensure the source package name matches the Source field in the .changes
716 if self.pkg.changes["source"] != entry["package"]:
717 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
719 # Ensure the source version matches the version in the .changes file
720 if re_is_orig_source.match(f):
721 changes_version = self.pkg.changes["chopversion2"]
723 changes_version = self.pkg.changes["chopversion"]
725 if changes_version != entry["version"]:
726 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
728 # Ensure the .changes lists source in the Architecture field
729 if not self.pkg.changes["architecture"].has_key("source"):
730 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
732 # Check the signature of a .dsc file
733 if entry["type"] == "dsc":
734 # check_signature returns either:
735 # (None, [list, of, rejects]) or (signature, [])
736 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
738 self.rejects.append(j)
740 entry["architecture"] = "source"
742 def per_suite_file_checks(self, f, suite, session):
744 entry = self.pkg.files[f]
747 if entry.has_key("byhand"):
750 # Check we have fields we need to do these checks
752 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
753 if not entry.has_key(m):
754 self.rejects.append("file '%s' does not have field %s set" % (f, m))
760 # Handle component mappings
761 for m in cnf.ValueList("ComponentMappings"):
762 (source, dest) = m.split()
763 if entry["component"] == source:
764 entry["original component"] = source
765 entry["component"] = dest
767 # Ensure the component is valid for the target suite
768 if cnf.has_key("Suite::%s::Components" % (suite)) and \
769 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
770 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
773 # Validate the component
774 if not get_component(entry["component"], session):
775 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
778 # See if the package is NEW
779 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
782 # Validate the priority
783 if entry["priority"].find('/') != -1:
784 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
786 # Determine the location
787 location = cnf["Dir::Pool"]
788 l = get_location(location, entry["component"], session=session)
790 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
791 entry["location id"] = -1
793 entry["location id"] = l.location_id
795 # Check the md5sum & size against existing files (if any)
796 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
798 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
799 entry["size"], entry["md5sum"], entry["location id"])
802 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
803 elif found is False and poolfile is not None:
804 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
807 entry["files id"] = None
809 entry["files id"] = poolfile.file_id
811 # Check for packages that have moved from one component to another
812 entry['suite'] = suite
813 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
815 entry["othercomponents"] = res.fetchone()[0]
817 def check_files(self, action=True):
818 file_keys = self.pkg.files.keys()
824 os.chdir(self.pkg.directory)
826 ret = holding.copy_to_holding(f)
828 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
832 # check whether we already know the changes file
833 # [NB: this check must be done post-suite mapping]
834 base_filename = os.path.basename(self.pkg.changes_file)
836 session = DBConn().session()
839 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
840 # if in the pool or in a queue other than unchecked, reject
841 if (dbc.in_queue is None) \
842 or (dbc.in_queue is not None
843 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
844 self.rejects.append("%s file already known to dak" % base_filename)
845 except NoResultFound, e:
852 for f, entry in self.pkg.files.items():
853 # Ensure the file does not already exist in one of the accepted directories
854 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
855 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
856 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
857 self.rejects.append("%s file already exists in the %s directory." % (f, d))
859 if not re_taint_free.match(f):
860 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
862 # Check the file is readable
863 if os.access(f, os.R_OK) == 0:
864 # When running in -n, copy_to_holding() won't have
865 # generated the reject_message, so we need to.
867 if os.path.exists(f):
868 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
870 # Don't directly reject, mark to check later to deal with orig's
871 # we can find in the pool
872 self.later_check_files.append(f)
873 entry["type"] = "unreadable"
876 # If it's byhand skip remaining checks
877 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
879 entry["type"] = "byhand"
881 # Checks for a binary package...
882 elif re_isadeb.match(f):
884 entry["type"] = "deb"
886 # This routine appends to self.rejects/warnings as appropriate
887 self.binary_file_checks(f, session)
889 # Checks for a source package...
890 elif re_issource.match(f):
893 # This routine appends to self.rejects/warnings as appropriate
894 self.source_file_checks(f, session)
896 # Not a binary or source package? Assume byhand...
899 entry["type"] = "byhand"
901 # Per-suite file checks
902 entry["oldfiles"] = {}
903 for suite in self.pkg.changes["distribution"].keys():
904 self.per_suite_file_checks(f, suite, session)
908 # If the .changes file says it has source, it must have source.
909 if self.pkg.changes["architecture"].has_key("source"):
911 self.rejects.append("no source found and Architecture line in changes mentions source.")
913 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
914 self.rejects.append("source-only uploads are not supported.")
916 ###########################################################################
917 def check_dsc(self, action=True, session=None):
918 """Returns bool indicating whether or not the source changes are valid"""
919 # Ensure there is source to check
920 if not self.pkg.changes["architecture"].has_key("source"):
925 for f, entry in self.pkg.files.items():
926 if entry["type"] == "dsc":
928 self.rejects.append("can not process a .changes file with multiple .dsc's.")
933 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
935 self.rejects.append("source uploads must contain a dsc file")
938 # Parse the .dsc file
940 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
941 except CantOpenError:
942 # if not -n copy_to_holding() will have done this for us...
944 self.rejects.append("%s: can't read file." % (dsc_filename))
945 except ParseChangesError, line:
946 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
947 except InvalidDscError, line:
948 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
949 except ChangesUnicodeError:
950 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
952 # Build up the list of files mentioned by the .dsc
954 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
955 except NoFilesFieldError:
956 self.rejects.append("%s: no Files: field." % (dsc_filename))
958 except UnknownFormatError, format:
959 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
961 except ParseChangesError, line:
962 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
965 # Enforce mandatory fields
966 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
967 if not self.pkg.dsc.has_key(i):
968 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
971 # Validate the source and version fields
972 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
973 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
974 if not re_valid_version.match(self.pkg.dsc["version"]):
975 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
977 # Only a limited set of source formats is allowed in each suite
978 for dist in self.pkg.changes["distribution"].keys():
979 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
980 if self.pkg.dsc["format"] not in allowed:
981 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
983 # Validate the Maintainer field
985 # We ignore the return value
986 fix_maintainer(self.pkg.dsc["maintainer"])
987 except ParseMaintError, msg:
988 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
989 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
991 # Validate the build-depends field(s)
992 for field_name in [ "build-depends", "build-depends-indep" ]:
993 field = self.pkg.dsc.get(field_name)
995 # Have apt try to parse them...
997 apt_pkg.ParseSrcDepends(field)
999 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1001 # Ensure the version number in the .dsc matches the version number in the .changes
1002 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1003 changes_version = self.pkg.files[dsc_filename]["version"]
1005 if epochless_dsc_version != changes_version:
1006 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1008 # Ensure the Files field contains only what's expected
1009 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1011 # Ensure source is newer than existing source in target suites
1012 session = DBConn().session()
1013 self.check_source_against_db(dsc_filename, session)
1014 self.check_dsc_against_db(dsc_filename, session)
1017 # Finally, check if we're missing any files
1018 for f in self.later_check_files:
1019 self.rejects.append("Could not find file %s referenced in changes" % f)
1023 ###########################################################################
1025 def get_changelog_versions(self, source_dir):
1026 """Extracts a the source package and (optionally) grabs the
1027 version history out of debian/changelog for the BTS."""
1031 # Find the .dsc (again)
1033 for f in self.pkg.files.keys():
1034 if self.pkg.files[f]["type"] == "dsc":
1037 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1038 if not dsc_filename:
1041 # Create a symlink mirror of the source files in our temporary directory
1042 for f in self.pkg.files.keys():
1043 m = re_issource.match(f)
1045 src = os.path.join(source_dir, f)
1046 # If a file is missing for whatever reason, give up.
1047 if not os.path.exists(src):
1050 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1051 self.pkg.orig_files[f].has_key("path"):
1053 dest = os.path.join(os.getcwd(), f)
1054 os.symlink(src, dest)
1056 # If the orig files are not a part of the upload, create symlinks to the
1058 for orig_file in self.pkg.orig_files.keys():
1059 if not self.pkg.orig_files[orig_file].has_key("path"):
1061 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1062 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1064 # Extract the source
1065 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1066 (result, output) = commands.getstatusoutput(cmd)
1068 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1069 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1072 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1075 # Get the upstream version
1076 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1077 if re_strip_revision.search(upstr_version):
1078 upstr_version = re_strip_revision.sub('', upstr_version)
1080 # Ensure the changelog file exists
1081 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1082 if not os.path.exists(changelog_filename):
1083 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1086 # Parse the changelog
1087 self.pkg.dsc["bts changelog"] = ""
1088 changelog_file = utils.open_file(changelog_filename)
1089 for line in changelog_file.readlines():
1090 m = re_changelog_versions.match(line)
1092 self.pkg.dsc["bts changelog"] += line
1093 changelog_file.close()
1095 # Check we found at least one revision in the changelog
1096 if not self.pkg.dsc["bts changelog"]:
1097 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1099 def check_source(self):
1101 # a) there's no source
1102 if not self.pkg.changes["architecture"].has_key("source"):
1105 tmpdir = utils.temp_dirname()
1107 # Move into the temporary directory
1111 # Get the changelog version history
1112 self.get_changelog_versions(cwd)
1114 # Move back and cleanup the temporary tree
1118 shutil.rmtree(tmpdir)
1120 if e.errno != errno.EACCES:
1122 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1124 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1125 # We probably have u-r or u-w directories so chmod everything
1127 cmd = "chmod -R u+rwx %s" % (tmpdir)
1128 result = os.system(cmd)
1130 utils.fubar("'%s' failed with result %s." % (cmd, result))
1131 shutil.rmtree(tmpdir)
1132 except Exception, e:
1133 print "foobar2 (%s)" % e
1134 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1136 ###########################################################################
1137 def ensure_hashes(self):
1138 # Make sure we recognise the format of the Files: field in the .changes
1139 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1140 if len(format) == 2:
1141 format = int(format[0]), int(format[1])
1143 format = int(float(format[0])), 0
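# e.g. (illustrative) a Format of "1.8" parses to (1, 8); a bare "1" with no
# minor part is treated as (1, 0)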
1145 # We need to deal with the original changes blob, as the fields we need
1146 # might not be in the changes dict serialised into the .dak anymore.
1147 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1149 # Copy the checksums over to the current changes dict. This will keep
1150 # the existing modifications to it intact.
1151 for field in orig_changes:
1152 if field.startswith('checksums-'):
1153 self.pkg.changes[field] = orig_changes[field]
1155 # Check for unsupported hashes
1156 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1157 self.rejects.append(j)
1159 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1160 self.rejects.append(j)
1162 # We have to calculate the hash ourselves if the changes format is older than
1163 # the one the hash first appears in, rather than require it in the changes file
1164 for hashname, hashfunc, version in utils.known_hashes:
1165 # TODO: Move _ensure_changes_hash into this class
1166 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1167 self.rejects.append(j)
1168 if "source" in self.pkg.changes["architecture"]:
1169 # TODO: Move _ensure_dsc_hash into this class
1170 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1171 self.rejects.append(j)
1173 def check_hashes(self):
1174 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1175 self.rejects.append(m)
1177 for m in utils.check_size(".changes", self.pkg.files):
1178 self.rejects.append(m)
1180 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1181 self.rejects.append(m)
1183 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1184 self.rejects.append(m)
1186 self.ensure_hashes()
1188 ###########################################################################
1190 def ensure_orig(self, target_dir='.', session=None):
1192 Ensures that all orig files mentioned in the changes file are present
1193 in target_dir. If they do not exist, they are symlinked into place.
1195 A list of the symlinks that were created is returned (so they
1202 for filename, entry in self.pkg.dsc_files.iteritems():
1203 if not re_is_orig_source.match(filename):
1204 # File is not an orig; ignore
1207 if os.path.exists(filename):
1208 # File exists, no need to continue
1211 def symlink_if_valid(path):
1212 f = utils.open_file(path)
1213 md5sum = apt_pkg.md5sum(f)
1216 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1217 expected = (int(entry['size']), entry['md5sum'])
1219 if fingerprint != expected:
1222 dest = os.path.join(target_dir, filename)
1224 os.symlink(path, dest)
1225 symlinked.append(dest)
1231 session_ = DBConn().session()
1236 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1237 poolfile_path = os.path.join(
1238 poolfile.location.path, poolfile.filename
1241 if symlink_if_valid(poolfile_path):
1251 # Look in some other queues for the file
1252 queues = ('New', 'Byhand', 'ProposedUpdates',
1253 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1255 for queue in queues:
1256 if not cnf.get('Dir::Queue::%s' % queue):
1259 queuefile_path = os.path.join(
1260 cnf['Dir::Queue::%s' % queue], filename
1263 if not os.path.exists(queuefile_path):
1264 # Does not exist in this queue
1267 if symlink_if_valid(queuefile_path):
1272 ###########################################################################
1274 def check_lintian(self):
1276 Extends self.rejects by checking the output of lintian against tags
1277 specified in Dinstall::LintianTags.
1282 # Don't reject binary uploads
1283 if not self.pkg.changes['architecture'].has_key('source'):
1286 # Only check some distributions
1287 for dist in ('unstable', 'experimental'):
1288 if dist in self.pkg.changes['distribution']:
1293 # If we do not have a tagfile, don't do anything
1294 tagfile = cnf.get("Dinstall::LintianTags")
1298 # Parse the yaml file
1299 sourcefile = file(tagfile, 'r')
1300 sourcecontent = sourcefile.read()
1304 lintiantags = yaml.load(sourcecontent)['lintian']
1305 except yaml.YAMLError, msg:
1306 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1309 # Try and find all orig mentioned in the .dsc
1310 symlinked = self.ensure_orig()
1312 # Setup the input file for lintian
1313 fd, temp_filename = utils.temp_filename()
1314 temptagfile = os.fdopen(fd, 'w')
1315 for tags in lintiantags.values():
1316 temptagfile.writelines(['%s\n' % x for x in tags])
1320 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1321 (temp_filename, self.pkg.changes_file)
1323 result, output = commands.getstatusoutput(cmd)
1325 # Remove our tempfile and any symlinks we created
1326 os.unlink(temp_filename)
1328 for symlink in symlinked:
1332 utils.warn("lintian failed for %s [return code: %s]." % \
1333 (self.pkg.changes_file, result))
1334 utils.warn(utils.prefix_multi_line_string(output, \
1335 " [possible output:] "))
1340 [self.pkg.changes_file, "check_lintian"] + list(txt)
1344 parsed_tags = parse_lintian_output(output)
1345 self.rejects.extend(
1346 generate_reject_messages(parsed_tags, lintiantags, log=log)
1349 ###########################################################################
1350 def check_urgency(self):
1352 if self.pkg.changes["architecture"].has_key("source"):
1353 if not self.pkg.changes.has_key("urgency"):
1354 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1355 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1356 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1357 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1358 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1359 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1361 ###########################################################################
1363 # Sanity check the time stamps of files inside debs.
1364 # [Files in the near future cause ugly warnings and extreme time
1365 # travel can cause errors on extraction]
1367 def check_timestamps(self):
1370 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1371 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1372 tar = TarTime(future_cutoff, past_cutoff)
1374 for filename, entry in self.pkg.files.items():
1375 if entry["type"] == "deb":
1378 deb_file = utils.open_file(filename)
1379 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1382 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1383 except SystemError, e:
1384 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1385 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1388 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1392 future_files = tar.future_files.keys()
1394 num_future_files = len(future_files)
1395 future_file = future_files[0]
1396 future_date = tar.future_files[future_file]
1397 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1398 % (filename, num_future_files, future_file, time.ctime(future_date)))
1400 ancient_files = tar.ancient_files.keys()
1402 num_ancient_files = len(ancient_files)
1403 ancient_file = ancient_files[0]
1404 ancient_date = tar.ancient_files[ancient_file]
1405 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1406 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1408 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1410 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1411 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1413 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1419 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1420 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1421 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1422 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1423 self.pkg.changes["sponsoremail"] = uid_email
1428 ###########################################################################
1429 # check_signed_by_key checks
1430 ###########################################################################
1432 def check_signed_by_key(self):
1433 """Ensure the .changes is signed by an authorized uploader."""
1434 session = DBConn().session()
1436 # First of all we check that the person has proper upload permissions
1437 # and that this upload isn't blocked
1438 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1441 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1444 # TODO: Check that import-keyring adds UIDs properly
1446 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1449 # Check that the fingerprint which uploaded has permission to do so
1450 self.check_upload_permissions(fpr, session)
1452 # Check that this package is not in a transition
1453 self.check_transition(session)
1458 def check_upload_permissions(self, fpr, session):
1459 # Check any one-off upload blocks
1460 self.check_upload_blocks(fpr, session)
1462 # Start with DM as a special case
1463 # DM is a special case unfortunately, so we check it first
1464 # (keys with no source access get more access than DMs in one
1465 # way; DMs can only upload for their packages whether source
1466 # or binary, whereas keys with no access might be able to
1467 # upload some binaries)
1468 if fpr.source_acl.access_level == 'dm':
1469 self.check_dm_upload(fpr, session)
1471 # Check source-based permissions for other types
1472 if self.pkg.changes["architecture"].has_key("source") and \
1473 fpr.source_acl.access_level is None:
1474 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1475 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1476 self.rejects.append(rej)
1478 # If not a DM, we allow full upload rights
1479 uid_email = "%s@debian.org" % (fpr.uid.uid)
1480 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1483 # Check binary upload permissions
1484 # By this point we know that DMs can't have got here unless they
1485 # are allowed to deal with the package concerned so just apply
1487 if fpr.binary_acl.access_level == 'full':
1490 # Otherwise we're in the map case
1491 tmparches = self.pkg.changes["architecture"].copy()
1492 tmparches.pop('source', None)
1494 for bam in fpr.binary_acl_map:
1495 tmparches.pop(bam.architecture.arch_string, None)
1497 if len(tmparches.keys()) > 0:
1498 if fpr.binary_reject:
1499 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1500 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1501 self.rejects.append(rej)
1503 # TODO: This is where we'll implement reject vs throw away binaries later
1504 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1505 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1506 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1507 self.rejects.append(rej)
1510 def check_upload_blocks(self, fpr, session):
1511 """Check whether any upload blocks apply to this source, source
1512 version, uid / fpr combination"""
1514 def block_rej_template(fb):
1515 rej = 'Manual upload block in place for package %s' % fb.source
1516 if fb.version is not None:
1517 rej += ', version %s' % fb.version
1520 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1521 # version is None if the block applies to all versions
1522 if fb.version is None or fb.version == self.pkg.changes['version']:
1523 # Check both fpr and uid - either is enough to cause a reject
1524 if fb.fpr is not None:
1525 if fb.fpr.fingerprint == fpr.fingerprint:
1526 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1527 if fb.uid is not None:
1528 if fb.uid == fpr.uid:
1529 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1532 def check_dm_upload(self, fpr, session):
1533 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1534 ## none of the uploaded packages are NEW
1536 for f in self.pkg.files.keys():
1537 if self.pkg.files[f].has_key("byhand"):
1538 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1540 if self.pkg.files[f].has_key("new"):
1541 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1547 ## the most recent version of the package uploaded to unstable or
1548 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1549 ## section of its control file
1550 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1551 q = q.join(SrcAssociation)
1552 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1553 q = q.order_by(desc('source.version')).limit(1)
1558 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1559 self.rejects.append(rej)
1563 if not r.dm_upload_allowed:
1564 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1565 self.rejects.append(rej)
1568 ## the Maintainer: field of the uploaded .changes file corresponds with
1569 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1571 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1572 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1574 ## the most recent version of the package uploaded to unstable or
1575 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1576 ## non-developer maintainers cannot NMU or hijack packages)
1578 # srcuploaders includes the maintainer
1580 for sup in r.srcuploaders:
1581 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1582 # Eww - I hope we never have two people with the same name in Debian
1583 if email == fpr.uid.uid or name == fpr.uid.name:
1588 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1591 ## none of the packages are being taken over from other source packages
1592 for b in self.pkg.changes["binary"].keys():
1593 for suite in self.pkg.changes["distribution"].keys():
1594 q = session.query(DBSource)
1595 q = q.join(DBBinary).filter_by(package=b)
1596 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1599 if s.source != self.pkg.changes["source"]:
1600 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1604 def check_transition(self, session):
1607 sourcepkg = self.pkg.changes["source"]
1609 # No sourceful upload -> no need to do anything else, direct return
1610 # We also work with unstable uploads, not experimental or those going to some
1611 # proposed-updates queue
1612 if "source" not in self.pkg.changes["architecture"] or \
1613 "unstable" not in self.pkg.changes["distribution"]:
1616 # Also only check if there is a file defined (and existent) with
1618 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1619 if transpath == "" or not os.path.exists(transpath):
1622 # Parse the yaml file
1623 sourcefile = file(transpath, 'r')
1624 sourcecontent = sourcefile.read()
1626 transitions = yaml.load(sourcecontent)
1627 except yaml.YAMLError, msg:
1628 # This shouldn't happen, there is a wrapper to edit the file which
1629 # checks it, but we would rather be safe than end up rejecting
1631 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1634 # Now look through all defined transitions
1635 for trans in transitions:
1636 t = transitions[trans]
1637 source = t["source"]
1640 # Will be None if nothing is in testing.
1641 current = get_source_in_suite(source, "testing", session)
1642 if current is not None:
1643 compare = apt_pkg.VersionCompare(current.version, expected)
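# apt_pkg.VersionCompare(a, b) returns a value < 0 if a is older than b,
# 0 if equal and > 0 if newer, e.g. VersionCompare("1.0-1", "1.0-2") < 0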
1645 if current is None or compare < 0:
1646 # This is still valid, the current version in testing is older than
1647 # the new version we wait for, or there is none in testing yet
1649 # Check if the source we look at is affected by this.
1650 if sourcepkg in t['packages']:
1651 # The source is affected, let's reject it.
1653 rejectmsg = "%s: part of the %s transition.\n\n" % (
1656 if current is not None:
1657 currentlymsg = "at version %s" % (current.version)
1659 currentlymsg = "not present in testing"
1661 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1663 rejectmsg += "\n".join(textwrap.wrap("""Your package
1664 is part of a testing transition designed to get %s migrated (it is
1665 currently %s, we need version %s). This transition is managed by the
1666 Release Team, and %s is the Release-Team member responsible for it.
1667 Please mail debian-release@lists.debian.org or contact %s directly if you
1668 need further assistance. You might want to upload to experimental until this
1669 transition is done."""
1670 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1672 self.rejects.append(rejectmsg)
1675 ###########################################################################
1676 # End check_signed_by_key checks
1677 ###########################################################################
1679 def build_summaries(self):
1680 """ Build a summary of changes the upload introduces. """
1682 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1684 short_summary = summary
1686 # This is for direport's benefit...
1687 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1690 summary += "Changes: " + f
1692 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1694 summary += self.announce(short_summary, 0)
1696 return (summary, short_summary)
1698 ###########################################################################
1700 def close_bugs(self, summary, action):
1702 Send mail to close bugs as instructed by the closes field in the changes file.
1703 Also add a line to summary if any work was done.
1705 @type summary: string
1706 @param summary: summary text, as given by L{build_summaries}
1708 @type action: bool
1709 @param action: If set to false, no real action will be taken.
1712 @return: summary. If action was taken, extended by the list of closed bugs.
1716 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1718 bugs = self.pkg.changes["closes"].keys()
1724 summary += "Closing bugs: "
1726 summary += "%s " % (bug)
1729 self.Subst["__BUG_NUMBER__"] = bug
1730 if self.pkg.changes["distribution"].has_key("stable"):
1731 self.Subst["__STABLE_WARNING__"] = """
1732 Note that this package is not part of the released stable Debian
1733 distribution. It may have dependencies on other unreleased software,
1734 or other instabilities. Please take care if you wish to install it.
1735 The update will eventually make its way into the next released Debian
1738 self.Subst["__STABLE_WARNING__"] = ""
1739 mail_message = utils.TemplateSubst(self.Subst, template)
1740 utils.send_mail(mail_message)
1742 # Clear up after ourselves
1743 del self.Subst["__BUG_NUMBER__"]
1744 del self.Subst["__STABLE_WARNING__"]
1746 if action and self.logger:
1747 self.logger.log(["closing bugs"] + bugs)
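# The mail above is produced by the Subst/TemplateSubst pattern used throughout
# this class: placeholders such as __BUG_NUMBER__ in the template file are
# replaced by the values set on self.Subst. A minimal sketch with made-up
# values and a hypothetical template path:
#
#   subst = {"__BUG_NUMBER__": "123456", "__STABLE_WARNING__": ""}
#   msg = utils.TemplateSubst(subst, "/srv/dak/templates/process-unchecked.bug-close")
#   utils.send_mail(msg)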
1753 ###########################################################################
1755 def announce(self, short_summary, action):
1757 Send an announce mail about a new upload.
1759 @type short_summary: string
1760 @param short_summary: Short summary text to include in the mail
1762 @type action: bool
1763 @param action: If set to false, no real action will be taken.
1766 @return: Text string describing the action taken.
1771 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1773 # Only do announcements for source uploads with a recent dpkg-dev installed
1774 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1775 self.pkg.changes["architecture"].has_key("source"):
1781 self.Subst["__SHORT_SUMMARY__"] = short_summary
1783 for dist in self.pkg.changes["distribution"].keys():
1784 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1785 if announce_list == "" or lists_done.has_key(announce_list):
1788 lists_done[announce_list] = 1
1789 summary += "Announcing to %s\n" % (announce_list)
1793 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1794 if cnf.get("Dinstall::TrackingServer") and \
1795 self.pkg.changes["architecture"].has_key("source"):
1796 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1797 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1799 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1800 utils.send_mail(mail_message)
1802 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1804 if cnf.FindB("Dinstall::CloseBugs"):
1805 summary = self.close_bugs(summary, action)
1807 del self.Subst["__SHORT_SUMMARY__"]
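# A hedged sketch of the configuration keys the loop above consults; the key
# shapes ("Suite::<dist>::Announce", "Dinstall::TrackingServer") come from the
# code, the values are deployment-specific and purely illustrative:
#
#   Suite::unstable::Announce "debian-devel-changes@lists.debian.org";
#   Dinstall::TrackingServer "packages.qa.debian.org";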
1811 ###########################################################################
1813 def accept (self, summary, short_summary, session=None):
1817 This moves all files referenced from the .changes into the pool,
1818 sends the accepted mail, announces to lists, closes bugs and
1819 also checks for override disparities. If enabled it will write out
1820 the version history for the BTS Version Tracking and finally fill the per-suite copy queues (e.g. for buildds).
1823 @type summary: string
1824 @param summary: Summary text
1826 @type short_summary: string
1827 @param short_summary: Short summary
1831 stats = SummaryStats()
1834 self.logger.log(["installing changes", self.pkg.changes_file])
1838 # Add the .dsc file to the DB first
1839 for newfile, entry in self.pkg.files.items():
1840 if entry["type"] == "dsc":
1841 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1845 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1846 for newfile, entry in self.pkg.files.items():
1847 if entry["type"] == "deb":
1848 poolfiles.append(add_deb_to_db(self, newfile, session))
1850 # If this is a sourceful diff-only upload that is moving
1851 # cross-component, we need to copy the .orig files into the new
1852 # component too, for the same reasons as above.
1853 # XXX: mhy: I think this should be in add_dsc_to_db
1854 if self.pkg.changes["architecture"].has_key("source"):
1855 for orig_file in self.pkg.orig_files.keys():
1856 if not self.pkg.orig_files[orig_file].has_key("id"):
1857 continue # Skip if it's not in the pool
1858 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1859 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1860 continue # Skip if the location didn't change
1863 oldf = get_poolfile_by_id(orig_file_id, session)
1864 old_filename = os.path.join(oldf.location.path, oldf.filename)
1865 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1866 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1868 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1870 # TODO: Care about size/md5sum collisions etc
1871 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1873 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
1875 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1876 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1880 # Don't reference the old file from this .changes
1882 if p.file_id == oldf.file_id:
1885 poolfiles.append(newf)
1887 # Fix up the DSC references
1890 for df in source.srcfiles:
1891 if df.poolfile.file_id == oldf.file_id:
1892 # Add a new DSC entry and mark the old one for deletion
1893 # Don't do it in the loop so we don't change the thing we're iterating over
1895 newdscf.source_id = source.source_id
1896 newdscf.poolfile_id = newf.file_id
1897 session.add(newdscf)
1907 # Make sure that our source object is up-to-date
1908 session.expire(source)
1910 # Add changelog information to the database
1911 self.store_changelog()
1913 # Install the files into the pool
1914 for newfile, entry in self.pkg.files.items():
1915 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1916 utils.move(newfile, destination)
1917 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1918 stats.accept_bytes += float(entry["size"])
1920 # Copy the .changes file across for suites which need it.
1922 for suite_name in self.pkg.changes["distribution"].keys():
1923 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1924 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1926 for dest in copy_changes.keys():
1927 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1929 # We're done - commit the database changes
1930 session.commit()
1931 # Our SQL session will automatically start a new transaction after the commit.
1934 # Move the .changes into the 'done' directory
1935 utils.move(self.pkg.changes_file,
1936 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1938 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1939 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1942 self.Subst["__SUMMARY__"] = summary
1943 mail_message = utils.TemplateSubst(self.Subst,
1944 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1945 utils.send_mail(mail_message)
1946 self.announce(short_summary, 1)
1948 ## Helper stuff for DebBugs Version Tracking
1949 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1950 if self.pkg.changes["architecture"].has_key("source"):
1951 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1952 version_history = os.fdopen(fd, 'w')
1953 version_history.write(self.pkg.dsc["bts changelog"])
1954 version_history.close()
1955 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1956 self.pkg.changes_file[:-8]+".versions")
1957 os.rename(temp_filename, filename)
1958 os.chmod(filename, 0644)
1960 # Write out the binary -> source mapping.
1961 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1962 debinfo = os.fdopen(fd, 'w')
1963 for name, entry in sorted(self.pkg.files.items()):
1964 if entry["type"] == "deb":
1965 line = " ".join([entry["package"], entry["version"],
1966 entry["architecture"], entry["source package"],
1967 entry["source version"]])
1968 debinfo.write(line+"\n")
1970 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1971 self.pkg.changes_file[:-8]+".debinfo")
1972 os.rename(temp_filename, filename)
1973 os.chmod(filename, 0644)
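# For illustration only (package names and versions are made up), the two
# helper files written above end up looking roughly like this:
#
#   foo_1.2-1_amd64.versions  ->  the "bts changelog" version history taken
#                                 from the .dsc
#   foo_1.2-1_amd64.debinfo   ->  one line per binary package:
#                                 "foo 1.2-1 amd64 foo 1.2-1"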
1977 # Set up our copy queues (e.g. buildd queues)
1978 for suite_name in self.pkg.changes["distribution"].keys():
1979 suite = get_suite(suite_name, session)
1980 for q in suite.copy_queues:
1982 q.add_file_from_pool(f)
1987 stats.accept_count += 1
1989 def check_override(self):
1991 Checks override entries for validity. Mails "Override disparity" warnings,
1992 if that feature is enabled.
1994 Abandons the check if
1995 - override disparity checks are disabled
1996 - mail sending is disabled
2001 # Abandon the check if override disparity checks have been disabled
2002 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2005 summary = self.pkg.check_override()
2010 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2013 self.Subst["__SUMMARY__"] = summary
2014 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2015 utils.send_mail(mail_message)
2016 del self.Subst["__SUMMARY__"]
2018 ###########################################################################
2020 def remove(self, from_dir=None):
2022 Used (for instance) in p-u to remove the package from unchecked.
2024 Also removes the package from the holding area.
2026 if from_dir is None:
2027 from_dir = self.pkg.directory
2030 for f in self.pkg.files.keys():
2031 os.unlink(os.path.join(from_dir, f))
2032 if os.path.exists(os.path.join(h.holding_dir, f)):
2033 os.unlink(os.path.join(h.holding_dir, f))
2035 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2036 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2037 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2039 ###########################################################################
2041 def move_to_queue (self, queue):
2043 Move files to a destination queue using the permissions in the table
2046 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2047 queue.path, perms=int(queue.change_perms, 8))
2048 for f in self.pkg.files.keys():
2049 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
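# The queue permissions are stored as octal strings, so they are converted
# with int(x, 8) before being handed to utils.move(). A tiny sketch with a
# made-up value:
#
#   int("0660", 8) == 0660 == 432   # rw for owner and group, nothing for others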
2051 ###########################################################################
2053 def force_reject(self, reject_files):
2055 Forcefully move files from the current directory to the
2056 reject directory. If any file already exists in the reject
2057 directory, it will be moved to the morgue to make way for the new file.
2060 @type reject_files: dict
2061 @param reject_files: file dictionary
2067 for file_entry in reject_files:
2068 # Skip any files which don't exist or which we don't have permission to copy.
2069 if os.access(file_entry, os.R_OK) == 0:
2072 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2074 try:
2075 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2076 except OSError, e:
2077 # File exists? Let's find a new name by adding a number
2078 if e.errno == errno.EEXIST:
2080 dest_file = utils.find_next_free(dest_file, 255)
2081 except NoFreeFilenameError:
2082 # Something's either gone badly Pete Tong, or
2083 # someone is trying to exploit us.
2084 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2087 # Make sure we really got it
2088 try:
2089 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2090 except OSError:
2092 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2096 # If we got here, we own the destination file, so we can
2097 # safely overwrite it.
2098 utils.move(file_entry, dest_file, 1, perms=0660)
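# The os.open(..., O_CREAT | O_EXCL) dance above is the usual race-free way to
# claim a destination name: creation fails with EEXIST if someone else already
# owns the file. A self-contained sketch of the same idiom, using a
# hypothetical path:
#
#   import os, errno
#   try:
#       fd = os.open("/tmp/example.reject", os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
#   except OSError, e:
#       if e.errno == errno.EEXIST:
#           pass  # pick another name, as find_next_free() does above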
2101 ###########################################################################
2102 def do_reject (self, manual=0, reject_message="", notes=""):
2104 Reject an upload. If called without a reject message or C{manual} is
2105 true, spawn an editor so the user can write one.
2108 @param manual: manual or automated rejection
2110 @type reject_message: string
2111 @param reject_message: A reject message
2116 # If we weren't given a manual rejection message, spawn an
2117 # editor so the user can add one in...
2118 if manual and not reject_message:
2119 (fd, temp_filename) = utils.temp_filename()
2120 temp_file = os.fdopen(fd, 'w')
2123 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2124 % (note.author, note.version, note.notedate, note.comment))
2126 editor = os.environ.get("EDITOR","vi")
2127 answer = 'E'
2128 while answer == 'E':
2129 os.system("%s %s" % (editor, temp_filename))
2130 temp_fh = utils.open_file(temp_filename)
2131 reject_message = "".join(temp_fh.readlines())
2133 print "Reject message:"
2134 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2135 prompt = "[R]eject, Edit, Abandon, Quit ?"
2137 while prompt.find(answer) == -1:
2138 answer = utils.our_raw_input(prompt)
2139 m = re_default_answer.search(prompt)
2142 answer = answer[:1].upper()
2143 os.unlink(temp_filename)
2149 print "Rejecting.\n"
2153 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2154 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2156 # Move all the files into the reject directory
2157 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2158 self.force_reject(reject_files)
2160 # If we fail here someone is probably trying to exploit the race
2161 # so let's just raise an exception ...
2162 if os.path.exists(reason_filename):
2163 os.unlink(reason_filename)
2164 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2166 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2170 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2171 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2172 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2173 os.write(reason_fd, reject_message)
2174 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2176 # Build up the rejection email
2177 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2178 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2179 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2180 self.Subst["__REJECT_MESSAGE__"] = ""
2181 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2182 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2183 # Write the rejection email out as the <foo>.reason file
2184 os.write(reason_fd, reject_mail_message)
2186 del self.Subst["__REJECTOR_ADDRESS__"]
2187 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2188 del self.Subst["__CC__"]
2192 # Send the rejection mail
2193 utils.send_mail(reject_mail_message)
2196 self.logger.log(["rejected", self.pkg.changes_file])
2200 ################################################################################
2201 def in_override_p(self, package, component, suite, binary_type, filename, session):
2203 Check if a package already has override entries in the DB
2205 @type package: string
2206 @param package: package name
2208 @type component: string
2209 @param component: database id of the component
2212 @param suite: database id of the suite
2214 @type binary_type: string
2215 @param binary_type: type of the package
2217 @type filename: string
2218 @param filename: filename we check
2220 @return: the database result. But no one cares anyway.
2226 if binary_type == "": # must be source
2229 file_type = binary_type
2231 # Override suite name; used for example with proposed-updates
2232 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2233 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2235 result = get_override(package, suite, component, file_type, session)
2237 # If checking for a source package, fall back on the binary override type
2238 if file_type == "dsc" and len(result) < 1:
2239 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2241 # Remember the section and priority so we can check them later if appropriate
2244 self.pkg.files[filename]["override section"] = result.section.section
2245 self.pkg.files[filename]["override priority"] = result.priority.priority
2250 ################################################################################
2251 def get_anyversion(self, sv_list, suite):
2254 @param sv_list: list of (suite, version) tuples to check
2257 @param suite: suite name
2263 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2264 for (s, v) in sv_list:
2265 if s in [ x.lower() for x in anysuite ]:
2266 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
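# apt_pkg.VersionCompare() follows strcmp() conventions: negative if the first
# version is older, zero if equal, positive if newer. A small sketch, with
# versions chosen purely for illustration:
#
#   apt_pkg.VersionCompare("1.0-1", "1.0-2")   # < 0, 1.0-1 is older
#   apt_pkg.VersionCompare("2.0-1", "1.0-2")   # > 0, 2.0-1 is newer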
2271 ################################################################################
2273 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2276 @param sv_list: list of (suite, version) tuples to check
2278 @type filename: string
2279 @param filename: name of the file being checked (used in reject messages)
2281 @type new_version: string
2282 @param new_version: version of the uploaded package
2284 Ensure versions are newer than existing packages in target
2285 suites and that cross-suite version checking rules as
2286 set out in the conf file are satisfied.
2291 # Check versions for each target suite
2292 for target_suite in self.pkg.changes["distribution"].keys():
2293 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2294 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2296 # Enforce "must be newer than target suite" even if conffile omits it
2297 if target_suite not in must_be_newer_than:
2298 must_be_newer_than.append(target_suite)
2300 for (suite, existent_version) in sv_list:
2301 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2303 if suite in must_be_newer_than and sourceful and vercmp < 1:
2304 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2306 if suite in must_be_older_than and vercmp > -1:
2309 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2310 # we really use the other suite, ignoring the conflicting one ...
2311 addsuite = self.pkg.changes["distribution-version"][suite]
2313 add_version = self.get_anyversion(sv_list, addsuite)
2314 target_version = self.get_anyversion(sv_list, target_suite)
2317 # "not add_version" can only happen if we map to a suite
2318 # that doesn't enhance the suite we're propup'ing from,
2319 # so "propup-ver x a b c; map a d" is a problem only if
2320 # d doesn't enhance a.
2322 # I think we could always propagate in this case, rather
2323 # than complaining. Either way, this isn't a REJECT issue.
2325 # And - we really should complain to the dorks who configured dak
2326 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2327 self.pkg.changes.setdefault("propdistribution", {})
2328 self.pkg.changes["propdistribution"][addsuite] = 1
2330 elif not target_version:
2331 # "not target_version" is true when the package is NEW
2332 # we could just stick with the "...old version..." REJECT
2333 # for this, I think.
2334 self.rejects.append("Won't propagate NEW packages.")
2335 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2336 # propagation would be redundant. No need to reject though.
2337 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2339 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2340 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2342 self.warnings.append("Propagating upload to %s" % (addsuite))
2343 self.pkg.changes.setdefault("propdistribution", {})
2344 self.pkg.changes["propdistribution"][addsuite] = 1
2348 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
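# A hedged sketch of the configuration this check reads, in apt-style syntax;
# the key shapes ("Suite::<suite>::VersionChecks::MustBeNewerThan" etc.) come
# from the code, while the suite names, values and exact stanza nesting in
# dak.conf are illustrative assumptions:
#
#   Suite::unstable::VersionChecks::MustBeNewerThan { "stable"; "testing"; };
#   Suite::testing-proposed-updates::VersionChecks::MustBeOlderThan { "unstable"; };
#   Suite::testing::VersionChecks::Enhances { "stable"; };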
2350 ################################################################################
2351 def check_binary_against_db(self, filename, session):
2352 # Ensure version is sane
2353 q = session.query(BinAssociation)
2354 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2355 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2357 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2358 filename, self.pkg.files[filename]["version"], sourceful=False)
2360 # Check for any existing copies of the file
2361 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2362 q = q.filter_by(version=self.pkg.files[filename]["version"])
2363 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2366 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2368 ################################################################################
2370 def check_source_against_db(self, filename, session):
2371 source = self.pkg.dsc.get("source")
2372 version = self.pkg.dsc.get("version")
2374 # Ensure version is sane
2375 q = session.query(SrcAssociation)
2376 q = q.join(DBSource).filter(DBSource.source==source)
2378 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2379 filename, version, sourceful=True)
2381 ################################################################################
2382 def check_dsc_against_db(self, filename, session):
2385 @warning: NB: this function can remove entries from the 'files' index [if
2386 the orig tarball is a duplicate of the one in the archive]; if
2387 you're iterating over 'files' and call this function as part of
2388 the loop, be sure to add a check to the top of the loop to
2389 ensure you haven't just tried to dereference the deleted entry.
2394 self.pkg.orig_files = {} # XXX: do we need to clear it?
2395 orig_files = self.pkg.orig_files
2397 # Try and find all files mentioned in the .dsc. This has
2398 # to work harder to cope with the multiple possible
2399 # locations of an .orig.tar.gz.
2400 # The ordering on the select is needed to pick the newest orig
2401 # when it exists in multiple places.
2402 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2404 if self.pkg.files.has_key(dsc_name):
2405 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2406 actual_size = int(self.pkg.files[dsc_name]["size"])
2407 found = "%s in incoming" % (dsc_name)
2409 # Check the file does not already exist in the archive
2410 ql = get_poolfile_like_name(dsc_name, session)
2412 # Strip out anything that isn't '%s' or '/%s$'
2414 if not i.filename.endswith(dsc_name):
2417 # "[dak] has not broken them. [dak] has fixed a
2418 # brokenness. Your crappy hack exploited a bug in
2421 # "(Come on! I thought it was always obvious that
2422 # one just doesn't release different files with
2423 # the same name and version.)"
2424 # -- ajk@ on d-devel@l.d.o
2427 # Ignore exact matches for .orig.tar.gz
2429 if re_is_orig_source.match(dsc_name):
2431 if self.pkg.files.has_key(dsc_name) and \
2432 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2433 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2434 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2435 # TODO: Don't delete the entry, just mark it as not needed
2436 # This would fix the stupidity of changing something we often iterate over
2437 # whilst we're doing it
2438 del self.pkg.files[dsc_name]
2439 dsc_entry["files id"] = i.file_id
2440 if not orig_files.has_key(dsc_name):
2441 orig_files[dsc_name] = {}
2442 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2445 # Don't bitch that we couldn't find this file later
2447 self.later_check_files.remove(dsc_name)
2453 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2455 elif re_is_orig_source.match(dsc_name):
2457 ql = get_poolfile_like_name(dsc_name, session)
2459 # Strip out anything that isn't '%s' or '/%s$'
2460 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2462 if not i.filename.endswith(dsc_name):
2466 # Unfortunately, we may get more than one match here if,
2467 # for example, the package was in potato but had an -sa
2468 # upload in woody. So we need to choose the right one.
2470 # default to something sane in case we don't match any or have only one
2475 old_file = os.path.join(i.location.path, i.filename)
2476 old_file_fh = utils.open_file(old_file)
2477 actual_md5 = apt_pkg.md5sum(old_file_fh)
2479 actual_size = os.stat(old_file)[stat.ST_SIZE]
2480 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2483 old_file = os.path.join(i.location.path, i.filename)
2484 old_file_fh = utils.open_file(old_file)
2485 actual_md5 = apt_pkg.md5sum(old_file_fh)
2487 actual_size = os.stat(old_file)[stat.ST_SIZE]
2489 suite_type = x.location.archive_type
2490 # need this for updating dsc_files in install()
2491 dsc_entry["files id"] = x.file_id
2492 # See install() in process-accepted...
2493 if not orig_files.has_key(dsc_name):
2494 orig_files[dsc_name] = {}
2495 orig_files[dsc_name]["id"] = x.file_id
2496 orig_files[dsc_name]["path"] = old_file
2497 orig_files[dsc_name]["location"] = x.location.location_id
2499 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2500 # Not there? Check the queue directories...
2501 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2502 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2504 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2505 if os.path.exists(in_otherdir):
2506 in_otherdir_fh = utils.open_file(in_otherdir)
2507 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2508 in_otherdir_fh.close()
2509 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2511 if not orig_files.has_key(dsc_name):
2512 orig_files[dsc_name] = {}
2513 orig_files[dsc_name]["path"] = in_otherdir
2516 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2519 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2521 if actual_md5 != dsc_entry["md5sum"]:
2522 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2523 if actual_size != int(dsc_entry["size"]):
2524 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2526 ################################################################################
2527 # This is used by process-new and process-holding to recheck a changes file
2528 # at the time we're running. It mainly wraps various other internal functions
2529 # and is similar to accepted_checks - these should probably be tidied up
2531 def recheck(self, session):
2533 for f in self.pkg.files.keys():
2534 # The .orig.tar.gz can disappear out from under us if it's a
2535 # duplicate of one in the archive.
2536 if not self.pkg.files.has_key(f):
2539 entry = self.pkg.files[f]
2541 # Check that the source still exists
2542 if entry["type"] == "deb":
2543 source_version = entry["source version"]
2544 source_package = entry["source package"]
2545 if not self.pkg.changes["architecture"].has_key("source") \
2546 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2547 source_epochless_version = re_no_epoch.sub('', source_version)
2548 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2550 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2551 if cnf.has_key("Dir::Queue::%s" % (q)):
2552 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2555 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2557 # Version and file overwrite checks
2558 if entry["type"] == "deb":
2559 self.check_binary_against_db(f, session)
2560 elif entry["type"] == "dsc":
2561 self.check_source_against_db(f, session)
2562 self.check_dsc_against_db(f, session)
2564 ################################################################################
2565 def accepted_checks(self, overwrite_checks, session):
2566 # Recheck anything that relies on the database, since that's not
2567 # frozen between accept and our run time when called from p-a.
2569 # overwrite_checks is set to False when installing to stable/oldstable
2574 # Find the .dsc (again)
2576 for f in self.pkg.files.keys():
2577 if self.pkg.files[f]["type"] == "dsc":
2580 for checkfile in self.pkg.files.keys():
2581 # The .orig.tar.gz can disappear out from under us if it's a
2582 # duplicate of one in the archive.
2583 if not self.pkg.files.has_key(checkfile):
2586 entry = self.pkg.files[checkfile]
2588 # Check that the source still exists
2589 if entry["type"] == "deb":
2590 source_version = entry["source version"]
2591 source_package = entry["source package"]
2592 if not self.pkg.changes["architecture"].has_key("source") \
2593 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2594 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2596 # Version and file overwrite checks
2597 if overwrite_checks:
2598 if entry["type"] == "deb":
2599 self.check_binary_against_db(checkfile, session)
2600 elif entry["type"] == "dsc":
2601 self.check_source_against_db(checkfile, session)
2602 self.check_dsc_against_db(dsc_filename, session)
2604 # Propagate in the case it is in the override tables:
2605 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2606 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2607 propogate[suite] = 1
2609 nopropogate[suite] = 1
2611 for suite in propogate.keys():
2612 if suite in nopropogate:
2614 self.pkg.changes["distribution"][suite] = 1
2616 for checkfile in self.pkg.files.keys():
2617 # Check the package is still in the override tables
2618 for suite in self.pkg.changes["distribution"].keys():
2619 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2620 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2622 ################################################################################
2623 # If any file of an upload has a recent mtime then chances are good
2624 # the file is still being uploaded.
2626 def upload_too_new(self):
2629 # Move back to the original directory to get accurate time stamps
2631 os.chdir(self.pkg.directory)
2632 file_list = self.pkg.files.keys()
2633 file_list.extend(self.pkg.dsc_files.keys())
2634 file_list.append(self.pkg.changes_file)
2637 last_modified = time.time()-os.path.getmtime(f)
2638 if last_modified < int(cnf["Dinstall::SkipTime"]):
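# Worked example of the age test above, with made-up numbers: with
# Dinstall::SkipTime set to 300, a file whose mtime is 120 seconds old gives
# time.time() - os.path.getmtime(f) == 120 < 300, so the upload is treated as
# still in progress and left alone for now.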
2647 def store_changelog(self):
2649 # Skip binary-only uploads unless they are bin-NMUs
2650 if not self.pkg.changes['architecture'].has_key('source'):
2651 from daklib.regexes import re_bin_only_nmu
2652 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2655 session = DBConn().session()
2657 # Check if upload already has a changelog entry
2658 query = """SELECT changelog_id FROM changes WHERE source = :source
2659 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2660 if session.execute(query, {'source': self.pkg.changes['source'], \
2661 'version': self.pkg.changes['version'], \
2662 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2666 # Add current changelog text into changelogs_text table, return created ID
2667 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2668 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2670 # Link ID to the upload available in changes table
2671 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2672 AND version = :version AND architecture = :architecture"""
2673 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2674 'version': self.pkg.changes['version'], \
2675 'architecture': " ".join(self.pkg.changes['architecture'].keys())})