5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
44 from dak_exceptions import *
47 from config import Config
48 from holding import Holding
50 from summarystats import SummaryStats
51 from utils import parse_changes, check_dsc_files
52 from textutils import fix_maintainer
53 from binary import Binary
55 ###############################################################################
57 def get_type(f, session):
59 Get the file type of C{f}
62 @param f: file entry from Changes object
64 @type session: SQLA Session
65 @param session: SQL Alchemy session object
72 if f.has_key("dbtype"):
73 file_type = f["dbtype"]
74 elif re_source_ext.match(f["type"]):
77 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
79 # Validate the override type
80 type_id = get_override_type(file_type, session)
82 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
86 ################################################################################
88 # Determine what parts in a .changes are NEW
90 def determine_new(changes, files, warn=1):
92 Determine what parts in a C{changes} file are NEW.
94 @type changes: Upload.Pkg.changes dict
95 @param changes: Changes dictionary
97 @type files: Upload.Pkg.files dict
98 @param files: Files dictionary
101 @param warn: Warn if overrides are added for (old)stable
104 @return: dictionary of NEW components.
109 session = DBConn().session()
111 # Build up a list of potentially new things
112 for name, f in files.items():
113 # Skip byhand elements
114 if f["type"] == "byhand":
117 priority = f["priority"]
118 section = f["section"]
119 file_type = get_type(f, session)
120 component = f["component"]
122 if file_type == "dsc":
125 if not new.has_key(pkg):
127 new[pkg]["priority"] = priority
128 new[pkg]["section"] = section
129 new[pkg]["type"] = file_type
130 new[pkg]["component"] = component
131 new[pkg]["files"] = []
133 old_type = new[pkg]["type"]
134 if old_type != file_type:
135 # source gets trumped by deb or udeb
136 if old_type == "dsc":
137 new[pkg]["priority"] = priority
138 new[pkg]["section"] = section
139 new[pkg]["type"] = file_type
140 new[pkg]["component"] = component
142 new[pkg]["files"].append(name)
144 if f.has_key("othercomponents"):
145 new[pkg]["othercomponents"] = f["othercomponents"]
147 for suite in changes["suite"].keys():
148 for pkg in new.keys():
149 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
151 for file_entry in new[pkg]["files"]:
152 if files[file_entry].has_key("new"):
153 del files[file_entry]["new"]
157 for s in ['stable', 'oldstable']:
158 if changes["suite"].has_key(s):
159 print "WARNING: overrides will be added for %s!" % s
160 for pkg in new.keys():
161 if new[pkg].has_key("othercomponents"):
162 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
168 ################################################################################
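# A rough sketch (not from the original source; the package name and values are
# hypothetical) of the dictionary determine_new() above builds and returns:
#
#   new = {
#       "foo": {
#           "priority": "optional",
#           "section": "utils",
#           "type": "deb",
#           "component": "main",
#           "files": ["foo_1.0-1_amd64.deb"],
#           # plus "othercomponents" if the package already exists in another component
#       },
#   }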
170 def check_valid(new):
172 Check if section and priority for NEW packages exist in database.
173 Additionally does sanity checks:
174 - debian-installer packages have to be udeb (or source)
175 - non-debian-installer packages cannot be udeb
176 - source priority can only be assigned to dsc file types
179 @param new: Dict of new packages with their section, priority and type.
182 for pkg in new.keys():
183 section_name = new[pkg]["section"]
184 priority_name = new[pkg]["priority"]
185 file_type = new[pkg]["type"]
187 section = get_section(section_name)
189 new[pkg]["section id"] = -1
191 new[pkg]["section id"] = section.section_id
193 priority = get_priority(priority_name)
195 new[pkg]["priority id"] = -1
197 new[pkg]["priority id"] = priority.priority_id
200 di = section_name.find("debian-installer") != -1
202 # If d-i, we must be udeb and vice-versa
203 if (di and file_type not in ("udeb", "dsc")) or \
204 (not di and file_type == "udeb"):
205 new[pkg]["section id"] = -1
207 # If dsc we need to be source and vice-versa
208 if (priority_name == "source" and file_type != "dsc") or \
209 (priority_name != "source" and file_type == "dsc"):
210 new[pkg]["priority id"] = -1
212 ###############################################################################
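# An illustrative sketch (hypothetical package and values) of how check_valid()
# above flags impossible combinations by setting the ids to -1:
#
#   new = {"foo-installer": {"section": "debian-installer", "priority": "standard", "type": "deb"}}
#   check_valid(new)
#   # new["foo-installer"]["section id"] == -1   (debian-installer section, but not a udeb or dsc)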
214 def check_status(files):
216 for f in files.keys():
217 if files[f]["type"] == "byhand":
219 elif files[f].has_key("new"):
223 ###############################################################################
225 # Used by Upload.check_timestamps
226 class TarTime(object):
227 def __init__(self, future_cutoff, past_cutoff):
229 self.future_cutoff = future_cutoff
230 self.past_cutoff = past_cutoff
233 self.future_files = {}
234 self.ancient_files = {}
236 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
237 if MTime > self.future_cutoff:
238 self.future_files[Name] = MTime
239 if MTime < self.past_cutoff:
240 self.ancient_files[Name] = MTime
242 ###############################################################################
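# A minimal usage sketch (not part of dak; the path and cutoffs are hypothetical):
# TarTime is meant to be handed to apt_inst.debExtract() as the member callback,
# which is exactly how Upload.check_timestamps() below drives it.
#
#   tar = TarTime(time.time() + 86400, time.mktime(time.strptime("1984", "%Y")))
#   deb_file = utils.open_file("foo_1.0-1_amd64.deb")
#   apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#   # tar.future_files / tar.ancient_files now map member names to their mtimes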
244 class Upload(object):
246 Everything that has to do with processing an upload.
254 ###########################################################################
257 """ Reset a number of internal variables."""
259 # Initialize the substitution template map
262 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
263 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
264 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
265 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
273 def package_info(self):
275 Format various messages from this Upload to send to the maintainer.
279 ('Reject Reasons', self.rejects),
280 ('Warnings', self.warnings),
281 ('Notes', self.notes),
285 for title, messages in msgs:
287 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
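# A hedged sketch of the text built up here, assuming one reject reason and one
# warning were collected (the messages themselves are hypothetical):
#
#   Reject Reasons:
#   foo_1.0-1.dsc: invalid source name 'Foo'
#
#   Warnings:
#   foo_1.0-1_amd64.deb control file lists section as `utils', but changes file has `misc'.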
291 ###########################################################################
292 def update_subst(self):
293 """ Set up the per-package template substitution mappings """
297 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
298 if not self.pkg.changes.has_key("architecture") or not \
299 isinstance(self.pkg.changes["architecture"], dict):
300 self.pkg.changes["architecture"] = { "Unknown" : "" }
302 # and maintainer2047 may not exist.
303 if not self.pkg.changes.has_key("maintainer2047"):
304 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
306 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
307 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
308 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
310 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
311 if self.pkg.changes["architecture"].has_key("source") and \
312 self.pkg.changes["changedby822"] != "" and \
313 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
315 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
316 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
317 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
319 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
320 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
321 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
323 if "sponsoremail" in self.pkg.changes:
324 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
326 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
327 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
329 # Apply any global override of the Maintainer field
330 if cnf.get("Dinstall::OverrideMaintainer"):
331 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
332 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
334 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
335 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
336 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
338 ###########################################################################
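# A rough sketch (hypothetical values) of some of the substitution keys that
# update_subst() fills in for the mail templates used later on:
#
#   self.Subst["__ARCHITECTURE__"]     -> "source amd64"
#   self.Subst["__CHANGES_FILENAME__"] -> "foo_1.0-1_amd64.changes"
#   self.Subst["__MAINTAINER_TO__"]    -> "Jane Doe <jane@example.org>"
#   self.Subst["__SOURCE__"]           -> "foo"
#   self.Subst["__VERSION__"]          -> "1.0-1"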
339 def load_changes(self, filename):
342 @return: whether the changes file was valid or not. We may want to
343 reject even if this is True (see what gets put in self.rejects).
344 This is simply to prevent us even trying things later which will
345 fail because we couldn't properly parse the file.
348 self.pkg.changes_file = filename
350 # Parse the .changes file into a dictionary
352 self.pkg.changes.update(parse_changes(filename))
353 except CantOpenError:
354 self.rejects.append("%s: can't read file." % (filename))
356 except ParseChangesError, line:
357 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
359 except ChangesUnicodeError:
360 self.rejects.append("%s: changes file not proper utf-8" % (filename))
363 # Parse the Files field from the .changes into another dictionary
365 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
366 except ParseChangesError, line:
367 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
369 except UnknownFormatError, format:
370 self.rejects.append("%s: unknown format '%s'." % (filename, format))
373 # Check for mandatory fields
374 for i in ("distribution", "source", "binary", "architecture",
375 "version", "maintainer", "files", "changes", "description"):
376 if not self.pkg.changes.has_key(i):
377 # Avoid undefined errors later
378 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
381 # Strip a source version in brackets from the source field
382 if re_strip_srcver.search(self.pkg.changes["source"]):
383 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
385 # Ensure the source field is a valid package name.
386 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
387 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
389 # Split multi-value fields into a lower-level dictionary
390 for i in ("architecture", "distribution", "binary", "closes"):
391 o = self.pkg.changes.get(i, "")
393 del self.pkg.changes[i]
395 self.pkg.changes[i] = {}
398 self.pkg.changes[i][j] = 1
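# For example (hypothetical values), a changes file containing
#   Architecture: source amd64
# ends up after this loop as
#   self.pkg.changes["architecture"] == {"source": 1, "amd64": 1}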
400 # Fix the Maintainer: field to be RFC822/2047 compatible
402 (self.pkg.changes["maintainer822"],
403 self.pkg.changes["maintainer2047"],
404 self.pkg.changes["maintainername"],
405 self.pkg.changes["maintaineremail"]) = \
406 fix_maintainer (self.pkg.changes["maintainer"])
407 except ParseMaintError, msg:
408 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
409 % (filename, self.pkg.changes["maintainer"], msg))
411 # ...likewise for the Changed-By: field if it exists.
413 (self.pkg.changes["changedby822"],
414 self.pkg.changes["changedby2047"],
415 self.pkg.changes["changedbyname"],
416 self.pkg.changes["changedbyemail"]) = \
417 fix_maintainer (self.pkg.changes.get("changed-by", ""))
418 except ParseMaintError, msg:
419 self.pkg.changes["changedby822"] = ""
420 self.pkg.changes["changedby2047"] = ""
421 self.pkg.changes["changedbyname"] = ""
422 self.pkg.changes["changedbyemail"] = ""
424 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
425 % (filename, self.pkg.changes.get("changed-by", ""), msg))
427 # Ensure all the values in Closes: are numbers
428 if self.pkg.changes.has_key("closes"):
429 for i in self.pkg.changes["closes"].keys():
430 if re_isanum.match(i) is None:
431 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
433 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
434 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
435 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
437 # Check there isn't already a changes file of the same name in one
438 # of the queue directories.
439 base_filename = os.path.basename(filename)
440 if get_knownchange(base_filename):
441 self.rejects.append("%s: a file with this name already exists." % (base_filename))
443 # Check the .changes is non-empty
444 if not self.pkg.files:
445 self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
448 # Changes was syntactically valid even if we'll reject
451 ###########################################################################
453 def check_distributions(self):
454 "Check and map the Distribution field"
458 # Handle suite mappings
459 for m in Cnf.ValueList("SuiteMappings"):
462 if mtype == "map" or mtype == "silent-map":
463 (source, dest) = args[1:3]
464 if self.pkg.changes["distribution"].has_key(source):
465 del self.pkg.changes["distribution"][source]
466 self.pkg.changes["distribution"][dest] = 1
467 if mtype != "silent-map":
468 self.notes.append("Mapping %s to %s." % (source, dest))
469 if self.pkg.changes.has_key("distribution-version"):
470 if self.pkg.changes["distribution-version"].has_key(source):
471 self.pkg.changes["distribution-version"][source]=dest
472 elif mtype == "map-unreleased":
473 (source, dest) = args[1:3]
474 if self.pkg.changes["distribution"].has_key(source):
475 for arch in self.pkg.changes["architecture"].keys():
476 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
477 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
478 del self.pkg.changes["distribution"][source]
479 self.pkg.changes["distribution"][dest] = 1
481 elif mtype == "ignore":
483 if self.pkg.changes["distribution"].has_key(suite):
484 del self.pkg.changes["distribution"][suite]
485 self.warnings.append("Ignoring %s as a target suite." % (suite))
486 elif mtype == "reject":
488 if self.pkg.changes["distribution"].has_key(suite):
489 self.rejects.append("Uploads to %s are not accepted." % (suite))
490 elif mtype == "propup-version":
491 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
493 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
494 if self.pkg.changes["distribution"].has_key(args[1]):
495 self.pkg.changes.setdefault("distribution-version", {})
496 for suite in args[2:]:
497 self.pkg.changes["distribution-version"][suite] = suite
499 # Ensure there is (still) a target distribution
500 if len(self.pkg.changes["distribution"].keys()) < 1:
501 self.rejects.append("No valid distribution remaining.")
503 # Ensure target distributions exist
504 for suite in self.pkg.changes["distribution"].keys():
505 if not Cnf.has_key("Suite::%s" % (suite)):
506 self.rejects.append("Unknown distribution `%s'." % (suite))
508 ###########################################################################
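# Illustrative SuiteMappings entries in the forms handled above; the suite names
# are hypothetical, only the leading keywords come from the code:
#
#   "map stable proposed-updates"
#   "silent-map testing-security testing-proposed-updates"
#   "map-unreleased unstable experimental"
#   "ignore oldstable"
#   "reject frozen"
#   "propup-version testing-proposed-updates testing"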
510 def binary_file_checks(self, f, session):
512 entry = self.pkg.files[f]
514 # Extract package control information
515 deb_file = utils.open_file(f)
517 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
519 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
521 # Can't continue, none of the checks on control would work.
524 # Check for mandatory "Description:"
527 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
529 self.rejects.append("%s: Missing Description in binary package" % (f))
534 # Check for mandatory fields
535 for field in [ "Package", "Architecture", "Version" ]:
536 if control.Find(field) == None:
538 self.rejects.append("%s: No %s field in control." % (f, field))
541 # Ensure the package name matches the one given in the .changes
542 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
543 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
545 # Validate the package field
546 package = control.Find("Package")
547 if not re_valid_pkg_name.match(package):
548 self.rejects.append("%s: invalid package name '%s'." % (f, package))
550 # Validate the version field
551 version = control.Find("Version")
552 if not re_valid_version.match(version):
553 self.rejects.append("%s: invalid version number '%s'." % (f, version))
555 # Ensure the architecture of the .deb is one we know about.
556 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
557 architecture = control.Find("Architecture")
558 upload_suite = self.pkg.changes["distribution"].keys()[0]
560 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
561 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
562 self.rejects.append("Unknown architecture '%s'." % (architecture))
564 # Ensure the architecture of the .deb is one of the ones
565 # listed in the .changes.
566 if not self.pkg.changes["architecture"].has_key(architecture):
567 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
569 # Sanity-check the Depends field
570 depends = control.Find("Depends")
572 self.rejects.append("%s: Depends field is empty." % (f))
574 # Sanity-check the Provides field
575 provides = control.Find("Provides")
577 provide = re_spacestrip.sub('', provides)
579 self.rejects.append("%s: Provides field is empty." % (f))
580 prov_list = provide.split(",")
581 for prov in prov_list:
582 if not re_valid_pkg_name.match(prov):
583 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
585 # Check the section & priority match those given in the .changes (non-fatal)
586 if control.Find("Section") and entry["section"] != "" \
587 and entry["section"] != control.Find("Section"):
588 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
589 (f, control.Find("Section", ""), entry["section"]))
590 if control.Find("Priority") and entry["priority"] != "" \
591 and entry["priority"] != control.Find("Priority"):
592 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
593 (f, control.Find("Priority", ""), entry["priority"]))
595 entry["package"] = package
596 entry["architecture"] = architecture
597 entry["version"] = version
598 entry["maintainer"] = control.Find("Maintainer", "")
600 if f.endswith(".udeb"):
601 self.pkg.files[f]["dbtype"] = "udeb"
602 elif f.endswith(".deb"):
603 self.pkg.files[f]["dbtype"] = "deb"
605 self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
607 entry["source"] = control.Find("Source", entry["package"])
609 # Get the source version
610 source = entry["source"]
613 if source.find("(") != -1:
614 m = re_extract_src_version.match(source)
616 source_version = m.group(2)
618 if not source_version:
619 source_version = self.pkg.files[f]["version"]
621 entry["source package"] = source
622 entry["source version"] = source_version
624 # Ensure the filename matches the contents of the .deb
625 m = re_isadeb.match(f)
628 file_package = m.group(1)
629 if entry["package"] != file_package:
630 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
631 (f, file_package, entry["dbtype"], entry["package"]))
632 epochless_version = re_no_epoch.sub('', control.Find("Version"))
635 file_version = m.group(2)
636 if epochless_version != file_version:
637 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
638 (f, file_version, entry["dbtype"], epochless_version))
641 file_architecture = m.group(3)
642 if entry["architecture"] != file_architecture:
643 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
644 (f, file_architecture, entry["dbtype"], entry["architecture"]))
646 # Check that the corresponding source exists
647 source_version = entry["source version"]
648 source_package = entry["source package"]
649 if self.pkg.changes["architecture"].has_key("source"):
650 if source_version != self.pkg.changes["version"]:
651 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
652 (source_version, f, self.pkg.changes["version"]))
654 # Check in the SQL database
655 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
656 # Check in one of the other directories
657 source_epochless_version = re_no_epoch.sub('', source_version)
658 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
659 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
661 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
664 dsc_file_exists = False
665 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
666 if cnf.has_key("Dir::Queue::%s" % (myq)):
667 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
668 dsc_file_exists = True
671 if not dsc_file_exists:
672 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
674 # Check the version and for file overwrites
675 self.check_binary_against_db(f, session)
677 # Temporarily disable contents generation until we change the table storage layout
680 #if len(b.rejects) > 0:
681 # for j in b.rejects:
682 # self.rejects.append(j)
684 def source_file_checks(self, f, session):
685 entry = self.pkg.files[f]
687 m = re_issource.match(f)
691 entry["package"] = m.group(1)
692 entry["version"] = m.group(2)
693 entry["type"] = m.group(3)
695 # Ensure the source package name matches the Source field in the .changes
696 if self.pkg.changes["source"] != entry["package"]:
697 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
699 # Ensure the source version matches the version in the .changes file
700 if re_is_orig_source.match(f):
701 changes_version = self.pkg.changes["chopversion2"]
703 changes_version = self.pkg.changes["chopversion"]
705 if changes_version != entry["version"]:
706 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
708 # Ensure the .changes lists source in the Architecture field
709 if not self.pkg.changes["architecture"].has_key("source"):
710 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
712 # Check the signature of a .dsc file
713 if entry["type"] == "dsc":
714 # check_signature returns either:
715 # (None, [list, of, rejects]) or (signature, [])
716 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
718 self.rejects.append(j)
720 entry["architecture"] = "source"
722 def per_suite_file_checks(self, f, suite, session):
724 entry = self.pkg.files[f]
725 archive = utils.where_am_i()
728 if entry.has_key("byhand"):
731 # Check we have fields we need to do these checks
733 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
734 if not entry.has_key(m):
735 self.rejects.append("file '%s' does not have field %s set" % (f, m))
741 # Handle component mappings
742 for m in cnf.ValueList("ComponentMappings"):
743 (source, dest) = m.split()
744 if entry["component"] == source:
745 entry["original component"] = source
746 entry["component"] = dest
748 # Ensure the component is valid for the target suite
749 if cnf.has_key("Suite::%s::Components" % (suite)) and \
750 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
751 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
754 # Validate the component
755 if not get_component(entry["component"], session):
756 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
759 # See if the package is NEW
760 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
763 # Validate the priority
764 if entry["priority"].find('/') != -1:
765 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
767 # Determine the location
768 location = cnf["Dir::Pool"]
769 l = get_location(location, entry["component"], archive, session)
771 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
772 entry["location id"] = -1
774 entry["location id"] = l.location_id
776 # Check the md5sum & size against existing files (if any)
777 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
779 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
780 entry["size"], entry["md5sum"], entry["location id"])
783 self.rejects.append("INTERNAL ERROR, check_poolfile() returned multiple matches for %s." % (f))
784 elif found is False and poolfile is not None:
785 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
788 entry["files id"] = None
790 entry["files id"] = poolfile.file_id
792 # Check for packages that have moved from one component to another
793 entry['suite'] = suite
794 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
796 entry["othercomponents"] = res.fetchone()[0]
798 def check_files(self, action=True):
799 archive = utils.where_am_i()
800 file_keys = self.pkg.files.keys()
804 # XXX: As far as I can tell, this can no longer happen - see
805 # comments by AJ in old revisions - mhy
806 # if reprocess is 2 we've already done this and we're checking
807 # things again for the new .orig.tar.gz.
808 # [Yes, I'm fully aware of how disgusting this is]
809 if action and self.reprocess < 2:
811 os.chdir(self.pkg.directory)
813 ret = holding.copy_to_holding(f)
815 # XXX: Should we bail out here or try and continue?
816 self.rejects.append(ret)
820 # Check there isn't already a .changes or .dak file of the same name in
821 # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
822 # [NB: this check must be done post-suite mapping]
823 base_filename = os.path.basename(self.pkg.changes_file)
824 dot_dak_filename = base_filename[:-8] + ".dak"
826 for suite in self.pkg.changes["distribution"].keys():
827 copychanges = "Suite::%s::CopyChanges" % (suite)
828 if cnf.has_key(copychanges) and \
829 os.path.exists(os.path.join(cnf[copychanges], base_filename)):
830 self.rejects.append("%s: a file with this name already exists in %s" \
831 % (base_filename, cnf[copychanges]))
833 copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
834 if cnf.has_key(copy_dot_dak) and \
835 os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
836 self.rejects.append("%s: a file with this name already exists in %s" \
837 % (dot_dak_filename, cnf[copy_dot_dak]))
843 session = DBConn().session()
845 for f, entry in self.pkg.files.items():
846 # Ensure the file does not already exist in one of the accepted directories
847 for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
848 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
849 if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
850 self.rejects.append("%s file already exists in the %s directory." % (f, d))
852 if not re_taint_free.match(f):
853 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
855 # Check the file is readable
856 if os.access(f, os.R_OK) == 0:
857 # When running in -n, copy_to_holding() won't have
858 # generated the reject_message, so we need to.
860 if os.path.exists(f):
861 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
863 self.rejects.append("Can't read `%s'. [file not found]" % (f))
864 entry["type"] = "unreadable"
867 # If it's byhand skip remaining checks
868 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
870 entry["type"] = "byhand"
872 # Checks for a binary package...
873 elif re_isadeb.match(f):
875 entry["type"] = "deb"
877 # This routine appends to self.rejects/warnings as appropriate
878 self.binary_file_checks(f, session)
880 # Checks for a source package...
881 elif re_issource.match(f):
884 # This routine appends to self.rejects/warnings as appropriate
885 self.source_file_checks(f, session)
887 # Not a binary or source package? Assume byhand...
890 entry["type"] = "byhand"
892 # Per-suite file checks
893 entry["oldfiles"] = {}
894 for suite in self.pkg.changes["distribution"].keys():
895 self.per_suite_file_checks(f, suite, session)
899 # If the .changes file says it has source, it must have source.
900 if self.pkg.changes["architecture"].has_key("source"):
902 self.rejects.append("no source found, but the Architecture line in the changes file mentions source.")
904 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
905 self.rejects.append("source only uploads are not supported.")
907 ###########################################################################
908 def check_dsc(self, action=True, session=None):
909 """Returns bool indicating whether or not the source changes are valid"""
910 # Ensure there is source to check
911 if not self.pkg.changes["architecture"].has_key("source"):
916 for f, entry in self.pkg.files.items():
917 if entry["type"] == "dsc":
919 self.rejects.append("cannot process a .changes file with multiple .dsc's.")
924 # If there isn't one, reject the upload; source uploads must include a .dsc.
926 self.rejects.append("source uploads must contain a dsc file")
929 # Parse the .dsc file
931 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
932 except CantOpenError:
933 # if not -n copy_to_holding() will have done this for us...
935 self.rejects.append("%s: can't read file." % (dsc_filename))
936 except ParseChangesError, line:
937 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
938 except InvalidDscError, line:
939 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
940 except ChangesUnicodeError:
941 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
943 # Build up the file list of files mentioned by the .dsc
945 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
946 except NoFilesFieldError:
947 self.rejects.append("%s: no Files: field." % (dsc_filename))
949 except UnknownFormatError, format:
950 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
952 except ParseChangesError, line:
953 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
956 # Enforce mandatory fields
957 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
958 if not self.pkg.dsc.has_key(i):
959 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
962 # Validate the source and version fields
963 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
964 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
965 if not re_valid_version.match(self.pkg.dsc["version"]):
966 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
968 # Only a limited list of source formats are allowed in each suite
969 for dist in self.pkg.changes["distribution"].keys():
970 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
971 if self.pkg.dsc["format"] not in allowed:
972 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
974 # Validate the Maintainer field
976 # We ignore the return value
977 fix_maintainer(self.pkg.dsc["maintainer"])
978 except ParseMaintError, msg:
979 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
980 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
982 # Validate the build-depends field(s)
983 for field_name in [ "build-depends", "build-depends-indep" ]:
984 field = self.pkg.dsc.get(field_name)
986 # Have apt try to parse them...
988 apt_pkg.ParseSrcDepends(field)
990 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
992 # Ensure the version number in the .dsc matches the version number in the .changes
993 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
994 changes_version = self.pkg.files[dsc_filename]["version"]
996 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
997 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
999 # Ensure the Files field contain only what's expected
1000 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1002 # Ensure source is newer than existing source in target suites
1003 session = DBConn().session()
1004 self.check_source_against_db(dsc_filename, session)
1005 self.check_dsc_against_db(dsc_filename, session)
1010 ###########################################################################
1012 def get_changelog_versions(self, source_dir):
1013 """Extracts a the source package and (optionally) grabs the
1014 version history out of debian/changelog for the BTS."""
1018 # Find the .dsc (again)
1020 for f in self.pkg.files.keys():
1021 if self.pkg.files[f]["type"] == "dsc":
1024 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1025 if not dsc_filename:
1028 # Create a symlink mirror of the source files in our temporary directory
1029 for f in self.pkg.files.keys():
1030 m = re_issource.match(f)
1032 src = os.path.join(source_dir, f)
1033 # If a file is missing for whatever reason, give up.
1034 if not os.path.exists(src):
1037 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1038 self.pkg.orig_files[f].has_key("path"):
1040 dest = os.path.join(os.getcwd(), f)
1041 os.symlink(src, dest)
1043 # If the orig files are not a part of the upload, create symlinks to the copies we already know about.
1045 for orig_file in self.pkg.orig_files.keys():
1046 if not self.pkg.orig_files[orig_file].has_key("path"):
1048 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1049 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1051 # Extract the source
1052 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1053 (result, output) = commands.getstatusoutput(cmd)
1055 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1056 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1059 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1062 # Get the upstream version
1063 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1064 if re_strip_revision.search(upstr_version):
1065 upstr_version = re_strip_revision.sub('', upstr_version)
1067 # Ensure the changelog file exists
1068 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1069 if not os.path.exists(changelog_filename):
1070 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1073 # Parse the changelog
1074 self.pkg.dsc["bts changelog"] = ""
1075 changelog_file = utils.open_file(changelog_filename)
1076 for line in changelog_file.readlines():
1077 m = re_changelog_versions.match(line)
1079 self.pkg.dsc["bts changelog"] += line
1080 changelog_file.close()
1082 # Check we found at least one revision in the changelog
1083 if not self.pkg.dsc["bts changelog"]:
1084 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1086 def check_source(self):
1087 # XXX: I'm fairly sure reprocess == 2 can never happen
1088 # AJT disabled the is_incoming check years ago - mhy
1089 # We should probably scrap or rethink the whole reprocess thing
1091 # a) there's no source
1092 # or b) reprocess is 2 - we will do this check next time when orig
1093 # tarball is in 'files'
1094 # or c) the orig files are MIA
1095 if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
1096 or len(self.pkg.orig_files) == 0:
1099 tmpdir = utils.temp_dirname()
1101 # Move into the temporary directory
1105 # Get the changelog version history
1106 self.get_changelog_versions(cwd)
1108 # Move back and cleanup the temporary tree
1112 shutil.rmtree(tmpdir)
1114 if e.errno != errno.EACCES:
1116 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1118 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1119 # We probably have user-unreadable (u-r) or user-unwritable (u-w) directories, so chmod everything
1121 cmd = "chmod -R u+rwx %s" % (tmpdir)
1122 result = os.system(cmd)
1124 utils.fubar("'%s' failed with result %s." % (cmd, result))
1125 shutil.rmtree(tmpdir)
1126 except Exception, e:
1127 print "foobar2 (%s)" % e
1128 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1130 ###########################################################################
1131 def ensure_hashes(self):
1132 # Make sure we recognise the format of the Files: field in the .changes
1133 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1134 if len(format) == 2:
1135 format = int(format[0]), int(format[1])
1137 format = int(float(format[0])), 0
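# e.g. a "Format: 1.8" changes file gives format == (1, 8), while a bare
# "Format: 1" would give (1, 0) (hypothetical values, just to illustrate).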
1139 # We need to deal with the original changes blob, as the fields we need
1140 # might not be in the changes dict serialised into the .dak anymore.
1141 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1143 # Copy the checksums over to the current changes dict. This will keep
1144 # the existing modifications to it intact.
1145 for field in orig_changes:
1146 if field.startswith('checksums-'):
1147 self.pkg.changes[field] = orig_changes[field]
1149 # Check for unsupported hashes
1150 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1151 self.rejects.append(j)
1153 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1154 self.rejects.append(j)
1156 # If the changes format predates a given hash, calculate that hash ourselves
1157 # rather than requiring it to be present in the changes file
1158 for hashname, hashfunc, version in utils.known_hashes:
1159 # TODO: Move _ensure_changes_hash into this class
1160 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1161 self.rejects.append(j)
1162 if "source" in self.pkg.changes["architecture"]:
1163 # TODO: Move _ensure_dsc_hash into this class
1164 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1165 self.rejects.append(j)
1167 def check_hashes(self):
1168 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1169 self.rejects.append(m)
1171 for m in utils.check_size(".changes", self.pkg.files):
1172 self.rejects.append(m)
1174 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1175 self.rejects.append(m)
1177 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1178 self.rejects.append(m)
1180 self.ensure_hashes()
1182 ###########################################################################
1184 def ensure_orig(self, target_dir='.', session=None):
1186 Ensures that all orig files mentioned in the .dsc are present
1187 in target_dir. If they do not exist, they are symlinked into place.
1189 A list of the symlinks that were created is returned (so they
1196 for filename, entry in self.pkg.dsc_files.iteritems():
1197 if not re_is_orig_source.match(filename):
1198 # File is not an orig; ignore
1201 if os.path.exists(filename):
1202 # File exists, no need to continue
1205 def symlink_if_valid(path):
1206 f = utils.open_file(path)
1207 md5sum = apt_pkg.md5sum(f)
1210 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1211 expected = (int(entry['size']), entry['md5sum'])
1213 if fingerprint != expected:
1216 dest = os.path.join(target_dir, filename)
1218 os.symlink(path, dest)
1219 symlinked.append(dest)
1225 session_ = DBConn().session()
1230 for poolfile in get_poolfile_like_name('/%s' % filename, session_):
1231 poolfile_path = os.path.join(
1232 poolfile.location.path, poolfile.filename
1235 if symlink_if_valid(poolfile_path):
1245 # Look in some other queues for the file
1246 queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
1247 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1249 for queue in queues:
1250 if not cnf.get('Dir::Queue::%s' % queue):
1253 queuefile_path = os.path.join(
1254 cnf['Dir::Queue::%s' % queue], filename
1257 if not os.path.exists(queuefile_path):
1258 # Does not exist in this queue
1261 if symlink_if_valid(queuefile_path):
1266 ###########################################################################
1268 def check_lintian(self):
1271 # Don't reject binary uploads
1272 if not self.pkg.changes['architecture'].has_key('source'):
1275 # Only check some distributions
1277 for dist in ('unstable', 'experimental'):
1278 if dist in self.pkg.changes['distribution']:
1285 tagfile = cnf.get("Dinstall::LintianTags")
1287 # We don't have a tagfile, so just don't do anything.
1290 # Parse the yaml file
1291 sourcefile = file(tagfile, 'r')
1292 sourcecontent = sourcefile.read()
1295 lintiantags = yaml.load(sourcecontent)['lintian']
1296 except yaml.YAMLError, msg:
1297 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
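# A sketch of the tag file layout this code expects; the tag names are
# hypothetical, only the 'lintian', 'warning' and 'error' keys are taken from
# how lintiantags is used below:
#
#   lintian:
#     warning:
#       - some-overridable-tag
#     error:
#       - some-fatal-tag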
1300 # Try and find all orig mentioned in the .dsc
1301 symlinked = self.ensure_orig()
1303 # Now set up the input file for lintian. lintian wants one tag per line,
1304 # so build the file that way. We put all types of tags in one file and then sort
1305 # through lintian's output later to see whether a detected tag is fatal or not.
1306 # That way we only run lintian once over all tags, even if we might reject on some but not others.
1308 # Additionally build up a set of tags
1310 (fd, temp_filename) = utils.temp_filename()
1311 temptagfile = os.fdopen(fd, 'w')
1312 for tagtype in lintiantags:
1313 for tag in lintiantags[tagtype]:
1314 temptagfile.write("%s\n" % tag)
1318 # So now we should look at running lintian at the .changes file, capturing output
1320 command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
1321 (result, output) = commands.getstatusoutput(command)
1323 # We are done with lintian, remove our tempfile and any symlinks we created
1324 os.unlink(temp_filename)
1325 for symlink in symlinked:
1329 utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
1330 utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
1332 if len(output) == 0:
1337 self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
1339 # We have lintian output, so this package isn't clean. Let's parse it and see
1340 # whether any of it warrants a reject.
1341 # W: tzdata: binary-without-manpage usr/sbin/tzconfig
1342 for line in output.split('\n'):
1343 m = re_parse_lintian.match(line)
1348 epackage = m.group(2)
1352 # So lets check if we know the tag at all.
1353 if etag not in tags:
1357 # We know it and it is overridden. Check whether the override is allowed.
1358 if etag in lintiantags['warning']:
1359 # The tag is overridden, and it is allowed to be overridden.
1360 # Don't add a reject message.
1362 elif etag in lintiantags['error']:
1363 # The tag is overridden - but is not allowed to be
1364 self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
1365 log("ftpmaster does not allow tag to be overridable", etag)
1367 # Tag is known, it is not overridden, direct reject.
1368 self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
1369 # Now tell if they *might* override it.
1370 if etag in lintiantags['warning']:
1371 log("auto rejecting", "overridable", etag)
1372 self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
1374 log("auto rejecting", "not overridable", etag)
1376 ###########################################################################
1377 def check_urgency(self):
1379 if self.pkg.changes["architecture"].has_key("source"):
1380 if not self.pkg.changes.has_key("urgency"):
1381 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1382 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1383 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1384 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1385 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1386 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1388 ###########################################################################
1390 # Sanity check the time stamps of files inside debs.
1391 # [Files in the near future cause ugly warnings and extreme time
1392 # travel can cause errors on extraction]
1394 def check_timestamps(self):
1397 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1398 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1399 tar = TarTime(future_cutoff, past_cutoff)
1401 for filename, entry in self.pkg.files.items():
1402 if entry["type"] == "deb":
1405 deb_file = utils.open_file(filename)
1406 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1409 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1410 except SystemError, e:
1411 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1412 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1415 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1419 future_files = tar.future_files.keys()
1421 num_future_files = len(future_files)
1422 future_file = future_files[0]
1423 future_date = tar.future_files[future_file]
1424 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1425 % (filename, num_future_files, future_file, time.ctime(future_date)))
1427 ancient_files = tar.ancient_files.keys()
1429 num_ancient_files = len(ancient_files)
1430 ancient_file = ancient_files[0]
1431 ancient_date = tar.ancient_files[ancient_file]
1432 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1433 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1435 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1437 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1438 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1440 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1446 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1447 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1448 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1449 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1450 self.pkg.changes["sponsoremail"] = uid_email
1455 ###########################################################################
1456 # check_signed_by_key checks
1457 ###########################################################################
1459 def check_signed_by_key(self):
1460 """Ensure the .changes is signed by an authorized uploader."""
1461 session = DBConn().session()
1463 # First of all we check that the person has proper upload permissions
1464 # and that this upload isn't blocked
1465 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1468 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1471 # TODO: Check that import-keyring adds UIDs properly
1473 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1476 # Check that the fingerprint which uploaded has permission to do so
1477 self.check_upload_permissions(fpr, session)
1479 # Check that this package is not in a transition
1480 self.check_transition(session)
1485 def check_upload_permissions(self, fpr, session):
1486 # Check any one-off upload blocks
1487 self.check_upload_blocks(fpr, session)
1489 # Start with DM as a special case
1490 # DM is a special case unfortunately, so we check it first
1491 # (keys with no source access get more access than DMs in one
1492 # way; DMs can only upload for their packages whether source
1493 # or binary, whereas keys with no access might be able to
1494 # upload some binaries)
1495 if fpr.source_acl.access_level == 'dm':
1496 self.check_dm_source_upload(fpr, session)
1498 # Check source-based permissions for other types
1499 if self.pkg.changes["architecture"].has_key("source"):
1500 if fpr.source_acl.access_level is None:
1501 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1502 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1503 self.rejects.append(rej)
1506 # If not a DM, we allow full upload rights
1507 uid_email = "%s@debian.org" % (fpr.uid.uid)
1508 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1511 # Check binary upload permissions
1512 # By this point we know that DMs can't have got here unless they
1513 # are allowed to deal with the package concerned so just apply
1515 if fpr.binary_acl.access_level == 'full':
1518 # Otherwise we're in the map case
1519 tmparches = self.pkg.changes["architecture"].copy()
1520 tmparches.pop('source', None)
1522 for bam in fpr.binary_acl_map:
1523 tmparches.pop(bam.architecture.arch_string, None)
1525 if len(tmparches.keys()) > 0:
1526 if fpr.binary_reject:
1527 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1528 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1529 self.rejects.append(rej)
1531 # TODO: This is where we'll implement reject vs throw away binaries later
1532 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1533 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1534 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1535 self.rejects.append(rej)
1538 def check_upload_blocks(self, fpr, session):
1539 """Check whether any upload blocks apply to this source, source
1540 version, uid / fpr combination"""
1542 def block_rej_template(fb):
1543 rej = 'Manual upload block in place for package %s' % fb.source
1544 if fb.version is not None:
1545 rej += ', version %s' % fb.version
1548 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1549 # version is None if the block applies to all versions
1550 if fb.version is None or fb.version == self.pkg.changes['version']:
1551 # Check both fpr and uid - either is enough to cause a reject
1552 if fb.fpr is not None:
1553 if fb.fpr.fingerprint == fpr.fingerprint:
1554 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1555 if fb.uid is not None:
1556 if fb.uid == fpr.uid:
1557 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1560 def check_dm_upload(self, fpr, session):
1561 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1562 ## none of the uploaded packages are NEW
1564 for f in self.pkg.files.keys():
1565 if self.pkg.files[f].has_key("byhand"):
1566 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1568 if self.pkg.files[f].has_key("new"):
1569 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1575 ## the most recent version of the package uploaded to unstable or
1576 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1577 ## section of its control file
1578 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1579 q = q.join(SrcAssociation)
1580 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1581 q = q.order_by(desc('source.version')).limit(1)
1586 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1587 self.rejects.append(rej)
1591 if not r.dm_upload_allowed:
1592 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1593 self.rejects.append(rej)
1596 ## the Maintainer: field of the uploaded .changes file corresponds with
1597 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1599 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1600 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1602 ## the most recent version of the package uploaded to unstable or
1603 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1604 ## non-developer maintainers cannot NMU or hijack packages)
1606 # srcuploaders includes the maintainer
1608 for sup in r.srcuploaders:
1609 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1610 # Eww - I hope we never have two people with the same name in Debian
1611 if email == fpr.uid.uid or name == fpr.uid.name:
1616 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1619 ## none of the packages are being taken over from other source packages
1620 for b in self.pkg.changes["binary"].keys():
1621 for suite in self.pkg.changes["distribution"].keys():
1622 q = session.query(DBSource)
1623 q = q.join(DBBinary).filter_by(package=b)
1624 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1627 if s.source != self.pkg.changes["source"]:
1628 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1632 def check_transition(self, session):
1635 sourcepkg = self.pkg.changes["source"]
1637 # No sourceful upload -> no need to do anything else, direct return
1638 # We also only care about uploads to unstable, not experimental or those
1639 # going to some proposed-updates queue
1640 if "source" not in self.pkg.changes["architecture"] or \
1641 "unstable" not in self.pkg.changes["distribution"]:
1644 # Also only check if there is a file defined (and existent) with
1646 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1647 if transpath == "" or not os.path.exists(transpath):
1650 # Parse the yaml file
1651 sourcefile = file(transpath, 'r')
1652 sourcecontent = sourcefile.read()
1654 transitions = yaml.load(sourcecontent)
1655 except yaml.YAMLError, msg:
1656 # This shouldn't happen, there is a wrapper to edit the file which
1657 # checks it, but we prefer to be safe rather than end up rejecting
1659 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
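# A rough sketch of one entry in the transitions file as consumed below; the
# names and versions are hypothetical, and the key holding the expected version
# is not shown in this excerpt:
#
#   libfoo-transition:
#     source: libfoo
#     reason: "libfoo 2.0 changes its ABI"
#     rm: Some Release Team Member
#     packages:
#       - bar
#       - baz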
1662 # Now look through all defined transitions
1663 for trans in transitions:
1664 t = transitions[trans]
1665 source = t["source"]
1668 # Will be None if nothing is in testing.
1669 current = get_source_in_suite(source, "testing", session)
1670 if current is not None:
1671 compare = apt_pkg.VersionCompare(current.version, expected)
1673 if current is None or compare < 0:
1674 # This is still valid, the current version in testing is older than
1675 # the new version we wait for, or there is none in testing yet
1677 # Check if the source we look at is affected by this.
1678 if sourcepkg in t['packages']:
1679 # The source is affected, lets reject it.
1681 rejectmsg = "%s: part of the %s transition.\n\n" % (
1684 if current is not None:
1685 currentlymsg = "at version %s" % (current.version)
1687 currentlymsg = "not present in testing"
1689 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1691 rejectmsg += "\n".join(textwrap.wrap("""Your package
1692 is part of a testing transition designed to get %s migrated (it is
1693 currently %s, we need version %s). This transition is managed by the
1694 Release Team, and %s is the Release-Team member responsible for it.
1695 Please mail debian-release@lists.debian.org or contact %s directly if you
1696 need further assistance. You might want to upload to experimental until this
1697 transition is done."""
1698 % (source, currentlymsg, expected,t["rm"], t["rm"])))
1700 self.rejects.append(rejectmsg)
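# Illustrative sketch (not taken from dak's real data files): the transitions
# YAML parsed above maps a transition name to a dict with at least "source",
# "reason", "rm" and "packages"; the key holding the awaited version is not
# visible in this excerpt, so "new" below is an assumption for illustration only.
#
#   apache2:
#     source: apache2
#     new: 2.2.12-1
#     rm: "Some Releaseteam Member"
#     reason: "waiting for apache2 to settle in testing"
#     packages:
#       - apache2
#       - apache2-mpm-worker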
1703 ###########################################################################
1704 # End check_signed_by_key checks
1705 ###########################################################################
1707 def build_summaries(self):
1708 """ Build a summary of changes the upload introduces. """
1710 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1712 short_summary = summary
1714 # This is for direport's benefit...
1715 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1718 summary += "Changes: " + f
1720 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1722 summary += self.announce(short_summary, 0)
1724 return (summary, short_summary)
1726 ###########################################################################
1728 def close_bugs(self, summary, action):
1730 Send mail to close bugs as instructed by the closes field in the changes file.
1731 Also add a line to summary if any work was done.
1733 @type summary: string
1734 @param summary: summary text, as given by L{build_summaries}
1737 @param action: if set to false, no real action will be taken.
1740 @return: summary. If action was taken, extended by the list of closed bugs.
1744 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1746 bugs = self.pkg.changes["closes"].keys()
1752 summary += "Closing bugs: "
1754 summary += "%s " % (bug)
1757 self.Subst["__BUG_NUMBER__"] = bug
1758 if self.pkg.changes["distribution"].has_key("stable"):
1759 self.Subst["__STABLE_WARNING__"] = """
1760 Note that this package is not part of the released stable Debian
1761 distribution. It may have dependencies on other unreleased software,
1762 or other instabilities. Please take care if you wish to install it.
1763 The update will eventually make its way into the next released Debian
1764 distribution."""
1765 else:
1766 self.Subst["__STABLE_WARNING__"] = ""
1767 mail_message = utils.TemplateSubst(self.Subst, template)
1768 utils.send_mail(mail_message)
1770 # Clear up after ourselves
1771 del self.Subst["__BUG_NUMBER__"]
1772 del self.Subst["__STABLE_WARNING__"]
1774 if action and self.logger:
1775 self.logger.log(["closing bugs"] + bugs)
1781 ###########################################################################
1783 def announce(self, short_summary, action):
1785 Send an announce mail about a new upload.
1787 @type short_summary: string
1788 @param short_summary: Short summary text to include in the mail
1791 @param action: if set to false, no real action will be taken.
1794 @return: text string describing the action taken.
1799 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1801 # Only do announcements for source uploads with a recent dpkg-dev installed
1802 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1803 self.pkg.changes["architecture"].has_key("source"):
1809 self.Subst["__SHORT_SUMMARY__"] = short_summary
1811 for dist in self.pkg.changes["distribution"].keys():
1812 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1813 if announce_list == "" or lists_done.has_key(announce_list):
1816 lists_done[announce_list] = 1
1817 summary += "Announcing to %s\n" % (announce_list)
1821 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1822 if cnf.get("Dinstall::TrackingServer") and \
1823 self.pkg.changes["architecture"].has_key("source"):
1824 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1825 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1827 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1828 utils.send_mail(mail_message)
1830 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1832 if cnf.FindB("Dinstall::CloseBugs"):
1833 summary = self.close_bugs(summary, action)
1835 del self.Subst["__SHORT_SUMMARY__"]
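# Illustrative sketch (hypothetical configuration values): the keys consulted
# above could be set roughly like this in dak's configuration:
#
#   Suite::unstable::Announce "debian-devel-changes@lists.debian.org";
#   Dinstall::TrackingServer "packages.qa.debian.org";
#   Dinstall::CloseBugs "true";
#
# A sourceful upload to unstable would then be announced to that list, Bcc'd
# to <source>@packages.qa.debian.org, and close_bugs() would be invoked.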
1839 ###########################################################################
1841 def accept (self, summary, short_summary, targetdir=None):
1845 This moves all files referenced from the .changes into the I{accepted}
1846 queue, sends the accepted mail, announces to lists, closes bugs and
1847 also checks for override disparities. If enabled it will write out
1848 the version history for the BTS Version Tracking and will finally call
1851 @type summary: string
1852 @param summary: Summary text
1854 @type short_summary: string
1855 @param short_summary: Short summary
1860 stats = SummaryStats()
1862 accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
1864 if targetdir is None:
1865 targetdir = cnf["Dir::Queue::Accepted"]
1869 self.logger.log(["Accepting changes", self.pkg.changes_file])
1871 self.pkg.write_dot_dak(targetdir)
1873 # Move all the files into the accepted directory
1874 utils.move(self.pkg.changes_file, targetdir)
1876 for name, entry in sorted(self.pkg.files.items()):
1877 utils.move(name, targetdir)
1878 stats.accept_bytes += float(entry["size"])
1880 stats.accept_count += 1
1882 # Send accept mail, announce to lists, close bugs and check for
1883 # override disparities
1884 if not cnf["Dinstall::Options::No-Mail"]:
1886 self.Subst["__SUITE__"] = ""
1887 self.Subst["__SUMMARY__"] = summary
1888 mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
1889 utils.send_mail(mail_message)
1890 self.announce(short_summary, 1)
1892 ## Helper stuff for DebBugs Version Tracking
1893 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1894 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1895 # the conditionalization on dsc["bts changelog"] should be removed.
1898 # Write out the version history from the changelog
1899 if self.pkg.changes["architecture"].has_key("source") and \
1900 self.pkg.dsc.has_key("bts changelog"):
1902 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1903 version_history = os.fdopen(fd, 'w')
1904 version_history.write(self.pkg.dsc["bts changelog"])
1905 version_history.close()
1906 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1907 self.pkg.changes_file[:-8]+".versions")
1908 os.rename(temp_filename, filename)
1909 os.chmod(filename, 0644)
1911 # Write out the binary -> source mapping.
1912 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1913 debinfo = os.fdopen(fd, 'w')
1914 for name, entry in sorted(self.pkg.files.items()):
1915 if entry["type"] == "deb":
1916 line = " ".join([entry["package"], entry["version"],
1917 entry["architecture"], entry["source package"],
1918 entry["source version"]])
1919 debinfo.write(line+"\n")
1921 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1922 self.pkg.changes_file[:-8]+".debinfo")
1923 os.rename(temp_filename, filename)
1924 os.chmod(filename, 0644)
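# Illustrative sketch (hypothetical upload): for foo_1.2-1_amd64.changes the
# code above writes foo_1.2-1_amd64.versions (the changelog version history
# taken from the .dsc) and foo_1.2-1_amd64.debinfo, the latter holding one
# "package version architecture source-package source-version" line per .deb,
# for example:
#
#   foo 1.2-1 amd64 foo 1.2-1
#   foo-doc 1.2-1 all foo 1.2-1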
1926 # This routine returns None on success or an error on failure
1927 # TODO: Replace queue copying using the new queue.add_file_from_pool routine
1928 # and by looking up which queues in suite.copy_queues
1929 #res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
1934 def check_override(self):
1936 Checks override entries for validity. Mails "Override disparity" warnings,
1937 if that feature is enabled.
1939 Abandons the check if
1940 - override disparity checks are disabled
1941 - mail sending is disabled
1946 # Abandon the check if:
1947 # a) override disparity checks have been disabled
1948 # b) we're not sending mail
1949 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1950 cnf["Dinstall::Options::No-Mail"]:
1953 summary = self.pkg.check_override()
1958 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1961 self.Subst["__SUMMARY__"] = summary
1962 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1963 utils.send_mail(mail_message)
1964 del self.Subst["__SUMMARY__"]
1966 ###########################################################################
1968 def remove(self, from_dir=None):
1970 Used (for instance) in p-u to remove the package from unchecked
1972 if from_dir is None:
1973 os.chdir(self.pkg.directory)
1974 else:
1975 os.chdir(from_dir)
1977 for f in self.pkg.files.keys():
1978 os.unlink(f)
1979 os.unlink(self.pkg.changes_file)
1981 ###########################################################################
1983 def move_to_dir (self, dest, perms=0660, changesperms=0664):
1985 Move files to dest with certain perms/changesperms
1987 utils.move(self.pkg.changes_file, dest, perms=changesperms)
1988 for f in self.pkg.files.keys():
1989 utils.move(f, dest, perms=perms)
1991 ###########################################################################
1993 def force_reject(self, reject_files):
1995 Forcefully move files from the current directory to the
1996 reject directory. If any file already exists in the reject
1997 directory it will be moved to the morgue to make way for the new file.
2001 @param reject_files: list of files to move to the reject directory
2007 for file_entry in reject_files:
2008 # Skip any files which don't exist or which we don't have permission to copy.
2009 if os.access(file_entry, os.R_OK) == 0:
2012 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2015 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2017 # File exists? Let's try and move it to the morgue
2018 if e.errno == errno.EEXIST:
2019 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2021 morgue_file = utils.find_next_free(morgue_file)
2022 except NoFreeFilenameError:
2023 # Something's either gone badly Pete Tong, or
2024 # someone is trying to exploit us.
2025 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2027 utils.move(dest_file, morgue_file, perms=0660)
2029 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2032 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2036 # If we got here, we own the destination file, so we can
2037 # safely overwrite it.
2038 utils.move(file_entry, dest_file, 1, perms=0660)
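# The os.open(..., os.O_RDWR | os.O_CREAT | os.O_EXCL) calls above are what make
# claiming the destination race-free: with O_EXCL the open fails with EEXIST if
# another process created the file first, instead of silently truncating it.
# A minimal standalone sketch of the same pattern (illustrative only):
#
#   import errno, os
#   try:
#       fd = os.open("/path/to/claim", os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
#   except OSError, e:
#       if e.errno == errno.EEXIST:
#           pass  # somebody else got there first; move their file aside and retry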
2041 ###########################################################################
2042 def do_reject (self, manual=0, reject_message="", note=""):
2044 Reject an upload. If C{manual} is true and no reject message is given,
2045 spawn an editor so the user can write one.
2048 @param manual: manual or automated rejection
2050 @type reject_message: string
2051 @param reject_message: A reject message
2056 # If we weren't given a manual rejection message, spawn an
2057 # editor so the user can add one in...
2058 if manual and not reject_message:
2059 (fd, temp_filename) = utils.temp_filename()
2060 temp_file = os.fdopen(fd, 'w')
2063 temp_file.write(line)
2065 editor = os.environ.get("EDITOR","vi")
2067 while answer == 'E':
2068 os.system("%s %s" % (editor, temp_filename))
2069 temp_fh = utils.open_file(temp_filename)
2070 reject_message = "".join(temp_fh.readlines())
2072 print "Reject message:"
2073 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2074 prompt = "[R]eject, Edit, Abandon, Quit ?"
2076 while prompt.find(answer) == -1:
2077 answer = utils.our_raw_input(prompt)
2078 m = re_default_answer.search(prompt)
2081 answer = answer[:1].upper()
2082 os.unlink(temp_filename)
2088 print "Rejecting.\n"
2092 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2093 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2095 # Move all the files into the reject directory
2096 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2097 self.force_reject(reject_files)
2099 # If we fail here someone is probably trying to exploit the race
2100 # so let's just raise an exception ...
2101 if os.path.exists(reason_filename):
2102 os.unlink(reason_filename)
2103 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2105 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2109 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2110 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2111 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2112 os.write(reason_fd, reject_message)
2113 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2115 # Build up the rejection email
2116 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2117 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2118 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2119 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2120 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2121 # Write the rejection email out as the <foo>.reason file
2122 os.write(reason_fd, reject_mail_message)
2124 del self.Subst["__REJECTOR_ADDRESS__"]
2125 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2126 del self.Subst["__CC__"]
2130 # Send the rejection mail if appropriate
2131 if not cnf["Dinstall::Options::No-Mail"]:
2132 utils.send_mail(reject_mail_message)
2135 self.logger.log(["rejected", self.pkg.changes_file])
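# Illustrative note (hypothetical filename): changes_file always ends in
# ".changes" (8 characters), so the [:-8] slices above turn, for example,
# "foo_1.2-1_amd64.changes" into "foo_1.2-1_amd64.reason" under
# Dir::Queue::Reject, which then carries the text of the rejection mail.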
2139 ################################################################################
2140 def in_override_p(self, package, component, suite, binary_type, filename, session):
2142 Check if a package already has override entries in the DB
2144 @type package: string
2145 @param package: package name
2147 @type component: string
2148 @param component: database id of the component
2151 @param suite: database id of the suite
2153 @type binary_type: string
2154 @param binary_type: type of the package
2156 @type filename: string
2157 @param filename: filename we check
2159 @return: the database result. But no one cares anyway.
2165 if binary_type == "": # must be source
2166 file_type = "dsc"
2167 else:
2168 file_type = binary_type
2170 # Override suite name; used for example with proposed-updates
2171 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2172 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2174 result = get_override(package, suite, component, file_type, session)
2176 # If checking for a source package fall back on the binary override type
2177 if file_type == "dsc" and len(result) < 1:
2178 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2180 # Remember the section and priority so we can check them later if appropriate
2183 self.pkg.files[filename]["override section"] = result.section.section
2184 self.pkg.files[filename]["override priority"] = result.priority.priority
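# Illustrative sketch (hypothetical configuration): a mapping such as
#
#   Suite::proposed-updates::OverrideSuite "stable";
#
# makes the lookup above consult stable's override entries for uploads aimed
# at proposed-updates, which is the case the "used for example with
# proposed-updates" comment refers to.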
2189 ################################################################################
2190 def get_anyversion(self, sv_list, suite):
2193 @param sv_list: list of (suite, version) tuples to check
2196 @param suite: suite name
2201 anyversion = None
2202 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2203 for (s, v) in sv_list:
2204 if s in [ x.lower() for x in anysuite ]:
2205 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2206 anyversion = v
2208 return anyversion
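# Illustrative example (hypothetical data): with
#   sv_list = [("unstable", "1.0-1"), ("experimental", "1.1-1")]
# and no Enhances entry configured for unstable, get_anyversion(sv_list,
# "unstable") only considers unstable and returns "1.0-1"; if the Enhances
# list named experimental as well, the VersionCompare test above would let
# the higher "1.1-1" win instead.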
2210 ################################################################################
2212 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2215 @param sv_list: list of (suite, version) tuples to check
2217 @type filename: string
2218 @param filename: name of the uploaded file being checked (only used in reject messages)
2220 @type new_version: string
2221 @param new_version: version of the package being uploaded
2223 Ensure versions are newer than existing packages in target
2224 suites and that cross-suite version checking rules as
2225 set out in the conf file are satisfied.
2230 # Check versions for each target suite
2231 for target_suite in self.pkg.changes["distribution"].keys():
2232 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2233 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2235 # Enforce "must be newer than target suite" even if conffile omits it
2236 if target_suite not in must_be_newer_than:
2237 must_be_newer_than.append(target_suite)
2239 for (suite, existent_version) in sv_list:
2240 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2242 if suite in must_be_newer_than and sourceful and vercmp < 1:
2243 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2245 if suite in must_be_older_than and vercmp > -1:
2248 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2249 # we really use the other suite, ignoring the conflicting one ...
2250 addsuite = self.pkg.changes["distribution-version"][suite]
2252 add_version = self.get_anyversion(sv_list, addsuite)
2253 target_version = self.get_anyversion(sv_list, target_suite)
2256 # not add_version can only happen if we map to a suite
2257 # that doesn't enhance the suite we're propup'ing from.
2258 # so "propup-ver x a b c; map a d" is a problem only if
2259 # d doesn't enhance a.
2261 # i think we could always propagate in this case, rather
2262 # than complaining. either way, this isn't a REJECT issue
2264 # And - we really should complain to the dorks who configured dak
2265 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2266 self.pkg.changes.setdefault("propdistribution", {})
2267 self.pkg.changes["propdistribution"][addsuite] = 1
2269 elif not target_version:
2270 # not target_version is true when the package is NEW
2271 # we could just stick with the "...old version..." REJECT
2272 # for this, I think.
2273 self.rejects.append("Won't propagate NEW packages.")
2274 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2275 # propagation would be redundant. no need to reject though.
2276 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2278 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2279 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2281 self.warnings.append("Propagating upload to %s" % (addsuite))
2282 self.pkg.changes.setdefault("propdistribution", {})
2283 self.pkg.changes["propdistribution"][addsuite] = 1
2287 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
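# Illustrative example (hypothetical values): if
#   Suite::testing::VersionChecks::MustBeNewerThan
# lists "stable" and stable already carries foo 1.0-1, then a sourceful upload
# of foo at version 1.0-1 or older targeted at testing trips the "vercmp < 1"
# check above and is rejected; a MustBeOlderThan conflict is likewise rejected
# unless a distribution-version mapping applies, in which case the upload is
# propagated to the mapped suite instead.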
2289 ################################################################################
2290 def check_binary_against_db(self, filename, session):
2291 # Ensure version is sane
2292 q = session.query(BinAssociation)
2293 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2294 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2296 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2297 filename, self.pkg.files[filename]["version"], sourceful=False)
2299 # Check for any existing copies of the file
2300 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2301 q = q.filter_by(version=self.pkg.files[filename]["version"])
2302 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2305 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
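# Illustrative example (hypothetical data): if the archive already contains
# foo 1.2-1 built for amd64, re-uploading a binary foo_1.2-1_amd64.deb matches
# the query above and draws the "can not overwrite" rejection; bumping the
# version (e.g. to 1.2-2) avoids it.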
2307 ################################################################################
2309 def check_source_against_db(self, filename, session):
2312 source = self.pkg.dsc.get("source")
2313 version = self.pkg.dsc.get("version")
2315 # Ensure version is sane
2316 q = session.query(SrcAssociation)
2317 q = q.join(DBSource).filter(DBSource.source==source)
2319 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2320 filename, version, sourceful=True)
2322 ################################################################################
2323 def check_dsc_against_db(self, filename, session):
2326 @warning: NB: this function can remove entries from the 'files' index [if
2327 the orig tarball is a duplicate of the one in the archive]; if
2328 you're iterating over 'files' and call this function as part of
2329 the loop, be sure to add a check to the top of the loop to
2330 ensure you haven't just tried to dereference the deleted entry.
2335 self.pkg.orig_files = {} # XXX: do we need to clear it?
2336 orig_files = self.pkg.orig_files
2338 # Try and find all files mentioned in the .dsc. This has
2339 # to work harder to cope with the multiple possible
2340 # locations of an .orig.tar.gz.
2341 # The ordering on the select is needed to pick the newest orig
2342 # when it exists in multiple places.
2343 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2345 if self.pkg.files.has_key(dsc_name):
2346 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2347 actual_size = int(self.pkg.files[dsc_name]["size"])
2348 found = "%s in incoming" % (dsc_name)
2350 # Check the file does not already exist in the archive
2351 ql = get_poolfile_like_name(dsc_name, session)
2353 # Strip out anything that isn't '%s' or '/%s$'
2355 if not i.filename.endswith(dsc_name):
2358 # "[dak] has not broken them. [dak] has fixed a
2359 # brokenness. Your crappy hack exploited a bug in
2362 # "(Come on! I thought it was always obvious that
2363 # one just doesn't release different files with
2364 # the same name and version.)"
2365 # -- ajk@ on d-devel@l.d.o
2368 # Ignore exact matches for .orig.tar.gz
2370 if re_is_orig_source.match(dsc_name):
2372 if self.pkg.files.has_key(dsc_name) and \
2373 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2374 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2375 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2376 # TODO: Don't delete the entry, just mark it as not needed
2377 # This would fix the stupidity of changing something we often iterate over
2378 # whilst we're doing it
2379 del self.pkg.files[dsc_name]
2380 if not orig_files.has_key(dsc_name):
2381 orig_files[dsc_name] = {}
2382 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2386 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2388 elif re_is_orig_source.match(dsc_name):
2390 ql = get_poolfile_like_name(dsc_name, session)
2392 # Strip out anything that isn't '%s' or '/%s$'
2393 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2395 if not i.filename.endswith(dsc_name):
2399 # Unfortunately, we may get more than one match here if,
2400 # for example, the package was in potato but had an -sa
2401 # upload in woody. So we need to choose the right one.
2403 # default to something sane in case we don't match any or have only one
2408 old_file = os.path.join(i.location.path, i.filename)
2409 old_file_fh = utils.open_file(old_file)
2410 actual_md5 = apt_pkg.md5sum(old_file_fh)
2412 actual_size = os.stat(old_file)[stat.ST_SIZE]
2413 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2416 old_file = os.path.join(i.location.path, i.filename)
2417 old_file_fh = utils.open_file(old_file)
2418 actual_md5 = apt_pkg.md5sum(old_file_fh)
2420 actual_size = os.stat(old_file)[stat.ST_SIZE]
2422 suite_type = x.location.archive_type
2423 # need this for updating dsc_files in install()
2424 dsc_entry["files id"] = x.file_id
2425 # See install() in process-accepted...
2426 if not orig_files.has_key(dsc_name):
2427 orig_files[dsc_name] = {}
2428 orig_files[dsc_name]["id"] = x.file_id
2429 orig_files[dsc_name]["path"] = old_file
2430 orig_files[dsc_name]["location"] = x.location.location_id
2432 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2433 # Not there? Check the queue directories...
2434 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2435 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2437 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2438 if os.path.exists(in_otherdir):
2439 in_otherdir_fh = utils.open_file(in_otherdir)
2440 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2441 in_otherdir_fh.close()
2442 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2444 if not orig_files.has_key(dsc_name):
2445 orig_files[dsc_name] = {}
2446 orig_files[dsc_name]["path"] = in_otherdir
2449 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2452 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2454 if actual_md5 != dsc_entry["md5sum"]:
2455 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2456 if actual_size != int(dsc_entry["size"]):
2457 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2459 ################################################################################
2460 # This is used by process-new and process-holding to recheck a changes file
2461 # at the time we're running. It mainly wraps various other internal functions
2462 # and is similar to accepted_checks - these should probably be tidied up
2464 def recheck(self, session):
2466 for f in self.pkg.files.keys():
2467 # The .orig.tar.gz can disappear out from under us if it's a
2468 # duplicate of one in the archive.
2469 if not self.pkg.files.has_key(f):
2472 entry = self.pkg.files[f]
2474 # Check that the source still exists
2475 if entry["type"] == "deb":
2476 source_version = entry["source version"]
2477 source_package = entry["source package"]
2478 if not self.pkg.changes["architecture"].has_key("source") \
2479 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2480 source_epochless_version = re_no_epoch.sub('', source_version)
2481 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2482 found = False
2483 for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
2484 if cnf.has_key("Dir::Queue::%s" % (q)):
2485 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2486 found = True
2487 if not found:
2488 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2490 # Version and file overwrite checks
2491 if entry["type"] == "deb":
2492 self.check_binary_against_db(f, session)
2493 elif entry["type"] == "dsc":
2494 self.check_source_against_db(f, session)
2495 self.check_dsc_against_db(f, session)
2497 ################################################################################
2498 def accepted_checks(self, overwrite_checks, session):
2499 # Recheck anything that relies on the database; since that's not
2500 # frozen between accept and our run time when called from p-a.
2502 # overwrite_checks is set to False when installing to stable/oldstable
2507 # Find the .dsc (again)
2509 for f in self.pkg.files.keys():
2510 if self.pkg.files[f]["type"] == "dsc":
2511 dsc_filename = f
2513 for checkfile in self.pkg.files.keys():
2514 # The .orig.tar.gz can disappear out from under us if it's a
2515 # duplicate of one in the archive.
2516 if not self.pkg.files.has_key(checkfile):
2519 entry = self.pkg.files[checkfile]
2521 # Check that the source still exists
2522 if entry["type"] == "deb":
2523 source_version = entry["source version"]
2524 source_package = entry["source package"]
2525 if not self.pkg.changes["architecture"].has_key("source") \
2526 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2527 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2529 # Version and file overwrite checks
2530 if overwrite_checks:
2531 if entry["type"] == "deb":
2532 self.check_binary_against_db(checkfile, session)
2533 elif entry["type"] == "dsc":
2534 self.check_source_against_db(checkfile, session)
2535 self.check_dsc_against_db(dsc_filename, session)
2537 # propagate in case it is in the override tables:
2538 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2539 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2540 propogate[suite] = 1
2541 else:
2542 nopropogate[suite] = 1
2544 for suite in propogate.keys():
2545 if suite in nopropogate:
2546 continue
2547 self.pkg.changes["distribution"][suite] = 1
2549 for checkfile in self.pkg.files.keys():
2550 # Check the package is still in the override tables
2551 for suite in self.pkg.changes["distribution"].keys():
2552 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2553 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2555 ################################################################################
2556 # This is not really a reject, but an unaccept, but since a) the code for
2557 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2558 # extremely rare, for now we'll go with whining at our admin folks...
2560 def do_unaccept(self):
2564 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2565 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2566 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2567 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2568 if cnf.has_key("Dinstall::Bcc"):
2569 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2571 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2573 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2575 # Write the rejection email out as the <foo>.reason file
2576 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2577 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2579 # If we fail here someone is probably trying to exploit the race
2580 # so let's just raise an exception ...
2581 if os.path.exists(reject_filename):
2582 os.unlink(reject_filename)
2584 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2585 os.write(fd, reject_mail_message)
2588 utils.send_mail(reject_mail_message)
2590 del self.Subst["__REJECTOR_ADDRESS__"]
2591 del self.Subst["__REJECT_MESSAGE__"]
2592 del self.Subst["__CC__"]
2594 ################################################################################
2595 # If any file of an upload has a recent mtime then chances are good
2596 # the file is still being uploaded.
2598 def upload_too_new(self):
2601 # Move back to the original directory to get accurate time stamps
2603 os.chdir(self.pkg.directory)
2604 file_list = self.pkg.files.keys()
2605 file_list.extend(self.pkg.dsc_files.keys())
2606 file_list.append(self.pkg.changes_file)
2609 last_modified = time.time()-os.path.getmtime(f)
2610 if last_modified < int(cnf["Dinstall::SkipTime"]):