"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################

import errno
import os
import re
import stat
import sys
import time
import apt_inst
import apt_pkg
import commands
import shutil
import textwrap
import yaml

import utils
from dak_exceptions import *
from dbconn import *
from config import Config
from holding import Holding
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
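# Example (illustrative sketch, not part of the original file): get_type() is
# meant to be called with a file entry from a parsed .changes and an open
# SQLAlchemy session; the entry below is a hypothetical minimal one.
#
#   session = DBConn().session()
#   f = {"type": "deb", "dbtype": "deb"}   # hypothetical file entry
#   file_type = get_type(f, session)       # validated type, e.g. "deb"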
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.
    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % (s)
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
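# Example (illustrative sketch): given an Upload object u with parsed
# .changes and files dictionaries, determine_new() returns the entries that
# have no override yet; warn=0 suppresses the warnings printed above.
#
#   new = determine_new(u.pkg.changes, u.pkg.files, warn=0)
#   for pkg in new.keys():
#       print "%s is NEW (%s/%s)" % (pkg, new[pkg]["component"], new[pkg]["section"])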
################################################################################

def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
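# Example (illustrative sketch): check_valid() annotates the dictionary built
# by determine_new() in place; a "section id" or "priority id" of -1 marks an
# entry that failed validation.
#
#   new = determine_new(u.pkg.changes, u.pkg.files)
#   check_valid(new)
#   broken = [p for p in new.keys() if new[p]["section id"] == -1]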
###############################################################################

def check_status(files):
    new = byhand = 0
    for f in files.keys():
        if files[f]["type"] == "byhand":
            byhand = 1
        elif files[f].has_key("new"):
            new = 1
    return (new, byhand)

###############################################################################
# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
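# Example (illustrative sketch): TarTime records per-member timestamps while
# apt_inst walks a tar chunk of a .deb; the cutoffs normally come from
# configuration (see Upload.check_timestamps below), here they are made up.
#
#   tar = TarTime(time.time() + 24 * 3600, time.mktime(time.strptime("1984", "%Y")))
#   deb_file = utils.open_file("dak_1.0_all.deb")   # hypothetical path
#   apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
#   print tar.future_files, tar.ancient_files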
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.
    """

    ###########################################################################
    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

    ###########################################################################
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
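    # Example (illustrative sketch): the __XXX__ keys set up above are what
    # utils.TemplateSubst() substitutes into the mail templates; the template
    # name here is only an illustration.
    #
    #   u.update_subst()
    #   template = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
    #   utils.send_mail(utils.TemplateSubst(u.Subst, template))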
    ###########################################################################
    def load_changes(self, filename):
        """
        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename
        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check there isn't already a changes file of the same name in one
        # of the queue directories.
        base_filename = os.path.basename(filename)
        if get_knownchange(base_filename):
            self.rejects.append("%s: a file with this name already exists." % (base_filename))

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
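    # Example (illustrative sketch): callers only proceed to the deeper checks
    # if the .changes could be parsed at all; rejects accumulate in u.rejects
    # either way (path below is hypothetical).
    #
    #   u = Upload()
    #   if u.load_changes("/srv/queue/foo_1.0-1_amd64.changes"):
    #       u.check_files()
    #   print u.package_info()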
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
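    # Example (illustrative sketch, values made up): SuiteMappings entries are
    # whitespace-separated words, the mapping type first, then its arguments,
    # in the apt-style configuration syntax used by dak.conf:
    #
    #   SuiteMappings
    #   {
    #     "map stable proposed-updates";
    #     "silent-map stable-security proposed-updates";
    #     "ignore testing";
    #     "reject experimental-security";
    #     "propup-version testing-security testing";
    #   };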
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))
        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")
        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version
        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))
        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]
        archive = utils.where_am_i()

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], archive, session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

            os.chdir(cwd)

        # Check there isn't already a .changes file of the same name in
        # the proposed-updates "CopyChanges" storage directories.
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        for suite in self.pkg.changes["distribution"].keys():
            copychanges = "Suite::%s::CopyChanges" % (suite)
            if cnf.has_key(copychanges) and \
               os.path.exists(os.path.join(cnf[copychanges], base_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (base_filename, cnf[copychanges]))

        has_binaries = False
        has_source = False

        session = DBConn().session()

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
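    # Example (illustrative sketch): check_files() is one step in the usual
    # chain run over an unchecked upload; every step only appends to
    # u.rejects, so the caller decides at the end whether to reject.
    #
    #   if u.load_changes(changes_file):
    #       u.check_distributions()
    #       u.check_files()
    #       u.check_dsc()
    #   if len(u.rejects) > 0:
    #       pass  # reject, mailing u.package_info() to the uploader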
    ###########################################################################

    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                # Skip orig files already available from the pool
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        # or b) the orig files are MIA
        if not self.pkg.changes["architecture"].has_key("source") \
           or len(self.pkg.orig_files) == 0:
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################

    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
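    # Example (illustrative sketch): the Format: field of the .changes decides
    # which hashes must be present, so "1.8" is parsed into the comparable
    # tuple (1, 8) before being handed to the per-hash helpers above.
    #
    #   format = "1.8".split(".", 1)
    #   format = int(format[0]), int(format[1])   # -> (1, 8)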
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed when the upload is done).
        """
        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])
                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)
                os.symlink(path, dest)
                symlinked.append(dest)
                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )
                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if found:
                continue

            # Look in some other queues for the file
            queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )
                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
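    # Example (illustrative sketch): callers that need the orig tarballs on
    # disk (e.g. check_lintian below) symlink them in, do their work and then
    # remove the symlinks again.
    #
    #   symlinked = u.ensure_orig(target_dir='.')
    #   try:
    #       pass  # work that needs the complete source next to the .dsc
    #   finally:
    #       for symlink in symlinked:
    #           os.unlink(symlink)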
    ###########################################################################

    def check_lintian(self):
        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        valid_dist = False
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                valid_dist = True
                break

        if not valid_dist:
            return

        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            # We don't have a tagfile, so just don't do anything.
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()
        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Now setup the input file for lintian. lintian wants "one tag per line" only,
        # so put it together like it. We put all types of tags in one file and then sort
        # through lintians output later to see if it's a fatal tag we detected, or not.
        # So we only run lintian once on all tags, even if we might reject on some, but not
        # reject on others.
        # Additionally build up a set of tags
        tags = set()
        (fd, temp_filename) = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tagtype in lintiantags:
            for tag in lintiantags[tagtype]:
                temptagfile.write("%s\n" % tag)
                tags.add(tag)
        temptagfile.close()

        # So now we should look at running lintian at the .changes file, capturing output
        # to then parse it.
        command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
        (result, output) = commands.getstatusoutput(command)

        # We are done with lintian, remove our tempfile and any symlinks we created
        os.unlink(temp_filename)
        for symlink in symlinked:
            os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))

        if len(output) == 0:
            return

        def log(*txt):
            if self.logger:
                self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))

        # We have output of lintian, this package isn't clean. Lets parse it and see if we
        # are having a victim for a reject.
        # W: tzdata: binary-without-manpage usr/sbin/tzconfig
        for line in output.split('\n'):
            m = re_parse_lintian.match(line)
            if m is None:
                continue

            etype = m.group(1)
            epackage = m.group(2)
            etag = m.group(3)
            etext = m.group(4)

            # So lets check if we know the tag at all.
            if etag not in tags:
                continue

            if etype == 'O':
                # We know it and it is overridden. Check that override is allowed.
                if etag in lintiantags['warning']:
                    # The tag is overridden, and it is allowed to be overridden.
                    # Don't add a reject message.
                    pass
                elif etag in lintiantags['error']:
                    # The tag is overridden - but is not allowed to be
                    self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
                    log("ftpmaster does not allow tag to be overridable", etag)
            else:
                # Tag is known, it is not overridden, direct reject.
                self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
                # Now tell if they *might* override it.
                if etag in lintiantags['warning']:
                    log("auto rejecting", "overridable", etag)
                    self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
                else:
                    log("auto rejecting", "not overridable", etag)
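    # Example (illustrative sketch, tag names made up): the file named by
    # Dinstall::LintianTags is YAML with a top-level "lintian" key mapping tag
    # classes to tag names; only the "error" and "warning" classes are
    # consulted above.
    #
    #   lintian:
    #     error:
    #       - some-fatal-tag
    #       - another-fatal-tag
    #     warning:
    #       - binary-without-manpage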
    ###########################################################################

    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_source_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source"):
                if fpr.source_acl.access_level is None:
                    rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                    rej += '\nPlease contact ftpmaster if you think this is incorrect'
                    self.rejects.append(rej)
                    return
            else:
                # If not a DM, we allow full upload rights
                uid_email = "%s@debian.org" % (fpr.uid.uid)
                self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)

        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
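    # Example (illustrative sketch): an UploadBlock row whose version is None
    # and whose fingerprint matches blocks every version of that source for
    # that key, yielding a reject of the form (values made up):
    #
    #   Manual upload block in place for package foo for fingerprint 0123...
    #   Reason: awaiting discussion with the maintainer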
    def check_dm_upload(self, fpr, session):
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existent) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe than ending up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
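    # Example (illustrative sketch, values made up): the ReleaseTransitions
    # file is YAML keyed by transition name; the fields read above are
    # "source", "new", "reason", "rm" and "packages".
    #
    #   libfoo2:
    #     source: libfoo
    #     new: 2.0-1
    #     rm: Some Release Teamer
    #     reason: "libfoo SONAME bump to libfoo2"
    #     packages:
    #       - bar
    #       - baz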
    ###########################################################################
    # End check_signed_by_key checks
    ###########################################################################

    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
1707 ###########################################################################
1709 def close_bugs(self, summary, action):
1711 Send mail to close bugs as instructed by the closes field in the changes file.
1712 Also add a line to summary if any work was done.
1714 @type summary: string
1715 @param summary: summary text, as given by L{build_summaries}
1718 @param action: if set to false, no real action will be taken.
1721 @return: summary. If action was taken, extended by the list of closed bugs.
1725 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1727 bugs = self.pkg.changes["closes"].keys()
1733 summary += "Closing bugs: "
1735 summary += "%s " % (bug)
1738 self.Subst["__BUG_NUMBER__"] = bug
1739 if self.pkg.changes["distribution"].has_key("stable"):
1740 self.Subst["__STABLE_WARNING__"] = """
1741 Note that this package is not part of the released stable Debian
1742 distribution. It may have dependencies on other unreleased software,
1743 or other instabilities. Please take care if you wish to install it.
1744 The update will eventually make its way into the next released Debian
1747 self.Subst["__STABLE_WARNING__"] = ""
1748 mail_message = utils.TemplateSubst(self.Subst, template)
1749 utils.send_mail(mail_message)
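# utils.TemplateSubst fills every __KEY__ placeholder in the template from
# the Subst map; e.g. a (hypothetical) template line
#   "Subject: Bug#__BUG_NUMBER__ closed"
# comes back with the bug number expanded.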
1751 # Clear up after ourselves
1752 del self.Subst["__BUG_NUMBER__"]
1753 del self.Subst["__STABLE_WARNING__"]
1755 if action and self.logger:
1756 self.logger.log(["closing bugs"] + bugs)
1762 ###########################################################################
1764 def announce(self, short_summary, action):
1766 Send an announce mail about a new upload.
1768 @type short_summary: string
1769 @param short_summary: Short summary text to include in the mail
1772 @param action: if set to false, no real action will be taken.
1775 @return: text string describing the action taken.
1780 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1782 # Only do announcements for source uploads with a recent dpkg-dev installed
1783 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1784 self.pkg.changes["architecture"].has_key("source"):
1790 self.Subst["__SHORT_SUMMARY__"] = short_summary
1792 for dist in self.pkg.changes["distribution"].keys():
1793 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
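# The announce target is per-suite configuration, e.g. (sketch):
#   Suite::unstable::Announce "debian-devel-changes@lists.debian.org";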
1794 if announce_list == "" or lists_done.has_key(announce_list):
1797 lists_done[announce_list] = 1
1798 summary += "Announcing to %s\n" % (announce_list)
1802 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1803 if cnf.get("Dinstall::TrackingServer") and \
1804 self.pkg.changes["architecture"].has_key("source"):
1805 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1806 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1808 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1809 utils.send_mail(mail_message)
1811 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1813 if cnf.FindB("Dinstall::CloseBugs"):
1814 summary = self.close_bugs(summary, action)
1816 del self.Subst["__SHORT_SUMMARY__"]
1820 ###########################################################################
1822 def accept (self, summary, short_summary, session):
1826 This moves all files referenced from the .changes into the pool,
1827 sends the accepted mail, announces to lists, closes bugs and
1828 also checks for override disparities. If enabled, it will write out
1829 the version history for the BTS Version Tracking and will finally call
1832 @type summary: string
1833 @param summary: Summary text
1835 @type short_summary: string
1836 @param short_summary: Short summary
1840 summarystats = SummaryStats()
1843 Logger.log(["installing changes", u.pkg.changes_file])
1845 # Add the .dsc file to the DB first
1846 for newfile, entry in u.pkg.files.items():
1847 if entry["type"] == "dsc":
1848 dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)
1850 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1851 for newfile, entry in u.pkg.files.items():
1852 if entry["type"] == "deb":
1853 add_deb_to_db(u, newfile, session)
1855 # If this is a sourceful diff-only upload that is moving
1856 # cross-component, we need to copy the .orig files into the new
1857 # component too, for the same reasons as above.
1858 if u.pkg.changes["architecture"].has_key("source"):
1859 for orig_file in u.pkg.orig_files.keys():
1860 if not u.pkg.orig_files[orig_file].has_key("id"):
1861 continue # Skip if it's not in the pool
1862 orig_file_id = u.pkg.orig_files[orig_file]["id"]
1863 if u.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1864 continue # Skip if the location didn't change
1867 oldf = get_poolfile_by_id(orig_file_id, session)
1868 old_filename = os.path.join(oldf.location.path, oldf.filename)
1869 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1870 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1872 new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
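# utils.poolify maps a source package to its pool subdirectory, roughly
# "<component>/<prefix>/<source>/"; e.g. poolify("dak", "main") should give
# "main/d/dak/", with "lib*" sources using a four-character prefix
# ("libfoo" -> "main/libf/libfoo/").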
1874 # TODO: Care about size/md5sum collisions etc
1875 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1878 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1879 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1881 # TODO: Check that there's only 1 here
1882 source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
1883 dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
1884 dscf.poolfile_id = newf.file_id
1888 # Install the files into the pool
1889 for newfile, entry in u.pkg.files.items():
1890 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1891 utils.move(newfile, destination)
1892 Logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1893 summarystats.accept_bytes += float(entry["size"])
1895 # Copy the .changes file across for suites which need it.
1897 for suite_name in u.pkg.changes["distribution"].keys():
1898 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1899 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1901 for dest in copy_changes.keys():
1902 utils.copy(u.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1904 # We're done - commit the database changes
1906 # Our SQL session will automatically start a new transaction after
1909 # Move the .changes into the 'done' directory
1910 utils.move(u.pkg.changes_file,
1911 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(u.pkg.changes_file)))
1913 if u.pkg.changes["architecture"].has_key("source") and log_urgency:
1914 UrgencyLog().log(u.pkg.dsc["source"], u.pkg.dsc["version"], u.pkg.changes["urgency"])
1916 # Send accept mail, announce to lists, close bugs and check for
1917 # override disparities
1918 if not cnf["Dinstall::Options::No-Mail"]:
1920 self.Subst["__SUITE__"] = ""
1921 self.Subst["__SUMMARY__"] = summary
1922 mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
1923 utils.send_mail(mail_message)
1924 self.announce(short_summary, 1)
1926 ## Helper stuff for DebBugs Version Tracking
1927 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1928 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1929 # the conditionalization on dsc["bts changelog"] should be
1932 # Write out the version history from the changelog
1933 if self.pkg.changes["architecture"].has_key("source") and \
1934 self.pkg.dsc.has_key("bts changelog"):
1936 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1937 version_history = os.fdopen(fd, 'w')
1938 version_history.write(self.pkg.dsc["bts changelog"])
1939 version_history.close()
1940 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1941 self.pkg.changes_file[:-8]+".versions")
1942 os.rename(temp_filename, filename)
1943 os.chmod(filename, 0644)
1945 # Write out the binary -> source mapping.
1946 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1947 debinfo = os.fdopen(fd, 'w')
1948 for name, entry in sorted(self.pkg.files.items()):
1949 if entry["type"] == "deb":
1950 line = " ".join([entry["package"], entry["version"],
1951 entry["architecture"], entry["source package"],
1952 entry["source version"]])
1953 debinfo.write(line+"\n")
1955 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1956 self.pkg.changes_file[:-8]+".debinfo")
1957 os.rename(temp_filename, filename)
1958 os.chmod(filename, 0644)
1961 # res = get_or_set_queue('buildd', session).autobuild_upload(self.pkg, session)
1964 # now_date = datetime.now()
1969 summarystats.accept_count += 1
1971 def check_override(self):
1973 Checks override entries for validity. Mails "Override disparity" warnings
1974 if that feature is enabled.
1976 Abandons the check if
1977 - override disparity checks are disabled
1978 - mail sending is disabled
1983 # Abandon the check if:
1984 # a) override disparity checks have been disabled
1985 # b) we're not sending mail
1986 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1987 cnf["Dinstall::Options::No-Mail"]:
1990 summary = self.pkg.check_override()
1995 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1998 self.Subst["__SUMMARY__"] = summary
1999 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2000 utils.send_mail(mail_message)
2001 del self.Subst["__SUMMARY__"]
2003 ###########################################################################
2005 def remove(self, from_dir=None):
2007 Used (for instance) in p-u to remove the package from unchecked
2009 if from_dir is None:
2010 os.chdir(self.pkg.directory)
2014 for f in self.pkg.files.keys():
2016 os.unlink(self.pkg.changes_file)
2018 ###########################################################################
2020 def move_to_dir (self, dest, perms=0660, changesperms=0664):
2022 Move files to dest with certain perms/changesperms
2024 utils.move(self.pkg.changes_file, dest, perms=changesperms)
2025 for f in self.pkg.files.keys():
2026 utils.move(f, dest, perms=perms)
2028 ###########################################################################
2030 def force_reject(self, reject_files):
2032 Forcefully move files from the current directory to the
2033 reject directory. If any file already exists in the reject
2034 directory it will be moved to the morgue to make way for
2038 @param reject_files: file dictionary of the files to reject
2044 for file_entry in reject_files:
2045 # Skip any files which don't exist or which we don't have permission to copy.
2046 if os.access(file_entry, os.R_OK) == 0:
2049 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2052 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2054 # File exists? Let's try and move it to the morgue
2055 if e.errno == errno.EEXIST:
2056 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2058 morgue_file = utils.find_next_free(morgue_file)
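# utils.find_next_free is assumed to probe "name", "name.1", "name.2", ...
# and raise NoFreeFilenameError once it runs out of attempts.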
2059 except NoFreeFilenameError:
2060 # Something's either gone badly Pete Tong, or
2061 # someone is trying to exploit us.
2062 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2064 utils.move(dest_file, morgue_file, perms=0660)
2066 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2069 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2073 # If we got here, we own the destination file, so we can
2074 # safely overwrite it.
2075 utils.move(file_entry, dest_file, 1, perms=0660)
2078 ###########################################################################
2079 def do_reject (self, manual=0, reject_message="", note=""):
2081 Reject an upload. If C{manual} is true and no reject message was
2082 given, spawn an editor so the user can write one.
2085 @param manual: manual or automated rejection
2087 @type reject_message: string
2088 @param reject_message: A reject message
2093 # If we weren't given a manual rejection message, spawn an
2094 # editor so the user can add one in...
2095 if manual and not reject_message:
2096 (fd, temp_filename) = utils.temp_filename()
2097 temp_file = os.fdopen(fd, 'w')
2100 temp_file.write(line)
2102 editor = os.environ.get("EDITOR","vi")
2104 while answer == 'E':
2105 os.system("%s %s" % (editor, temp_filename))
2106 temp_fh = utils.open_file(temp_filename)
2107 reject_message = "".join(temp_fh.readlines())
2109 print "Reject message:"
2110 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2111 prompt = "[R]eject, Edit, Abandon, Quit ?"
2113 while prompt.find(answer) == -1:
2114 answer = utils.our_raw_input(prompt)
2115 m = re_default_answer.search(prompt)
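# re_default_answer is assumed to capture the bracketed letter, so a bare
# Enter on "[R]eject, Edit, Abandon, Quit ?" defaults the answer to "R".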
2118 answer = answer[:1].upper()
2119 os.unlink(temp_filename)
2125 print "Rejecting.\n"
2129 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2130 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2132 # Move all the files into the reject directory
2133 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2134 self.force_reject(reject_files)
2136 # If we fail here someone is probably trying to exploit the race
2137 # so let's just raise an exception ...
2138 if os.path.exists(reason_filename):
2139 os.unlink(reason_filename)
2140 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2142 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2146 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2147 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2148 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2149 os.write(reason_fd, reject_message)
2150 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2152 # Build up the rejection email
2153 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2154 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2155 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2156 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2157 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2158 # Write the rejection email out as the <foo>.reason file
2159 os.write(reason_fd, reject_mail_message)
2161 del self.Subst["__REJECTOR_ADDRESS__"]
2162 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2163 del self.Subst["__CC__"]
2167 # Send the rejection mail if appropriate
2168 if not cnf["Dinstall::Options::No-Mail"]:
2169 utils.send_mail(reject_mail_message)
2172 self.logger.log(["rejected", self.pkg.changes_file])
2176 ################################################################################
2177 def in_override_p(self, package, component, suite, binary_type, filename, session):
2179 Check if a package already has override entries in the DB
2181 @type package: string
2182 @param package: package name
2184 @type component: string
2185 @param component: component name
2188 @param suite: suite name
2190 @type binary_type: string
2191 @param binary_type: type of the package
2193 @type filename: string
2194 @param filename: filename we check
2196 @return: the database result. But no one cares anyway.
2202 if binary_type == "": # must be source
2205 file_type = binary_type
2207 # Override suite name; used for example with proposed-updates
2208 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2209 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2211 result = get_override(package, suite, component, file_type, session)
2213 # If checking for a source package, fall back on the binary override type
2214 if file_type == "dsc" and len(result) < 1:
2215 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2217 # Remember the section and priority so we can check them later if appropriate
2220 self.pkg.files[filename]["override section"] = result.section.section
2221 self.pkg.files[filename]["override priority"] = result.priority.priority
2226 ################################################################################
2227 def get_anyversion(self, sv_list, suite):
2230 @param sv_list: list of (suite, version) tuples to check
2233 @param suite: suite name
2239 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2240 for (s, v) in sv_list:
2241 if s in [ x.lower() for x in anysuite ]:
2242 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2247 ################################################################################
2249 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2252 @param sv_list: list of (suite, version) tuples to check
2254 @type filename: string
2255 @param filename: name of the file being checked (used in reject messages)
2257 @type new_version: string
2258 @param new_version: version of the package being checked
2260 Ensure versions are newer than existing packages in target
2261 suites and that cross-suite version checking rules as
2262 set out in the conf file are satisfied.
2267 # Check versions for each target suite
2268 for target_suite in self.pkg.changes["distribution"].keys():
2269 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2270 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
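# Both lists come from per-suite config; a sketch of the expected dak.conf
# shape (suite names illustrative):
#   Suite::unstable::VersionChecks::MustBeNewerThan { "stable"; "testing"; };
#   Suite::unstable::VersionChecks::MustBeOlderThan { "experimental"; };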
2272 # Enforce "must be newer than target suite" even if conffile omits it
2273 if target_suite not in must_be_newer_than:
2274 must_be_newer_than.append(target_suite)
2276 for (suite, existent_version) in sv_list:
2277 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2279 if suite in must_be_newer_than and sourceful and vercmp < 1:
2280 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2282 if suite in must_be_older_than and vercmp > -1:
2285 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2286 # we really use the other suite, ignoring the conflicting one ...
2287 addsuite = self.pkg.changes["distribution-version"][suite]
2289 add_version = self.get_anyversion(sv_list, addsuite)
2290 target_version = self.get_anyversion(sv_list, target_suite)
2293 # not add_version can only happen if we map to a suite
2294 # that doesn't enhance the suite we're propup'ing from.
2295 # So "propup-ver x a b c; map a d" is a problem only if
2296 # d doesn't enhance a.
2298 # I think we could always propagate in this case, rather
2299 # than complaining. Either way, this isn't a REJECT issue
2301 # And - we really should complain to the dorks who configured dak
2302 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2303 self.pkg.changes.setdefault("propdistribution", {})
2304 self.pkg.changes["propdistribution"][addsuite] = 1
2306 elif not target_version:
2307 # not target_version is true when the package is NEW
2308 # we could just stick with the "...old version..." REJECT
2309 # for this, I think.
2310 self.rejects.append("Won't propogate NEW packages.")
2311 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2312 # propagation would be redundant. No need to reject, though.
2313 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2315 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2316 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2318 self.warnings.append("Propogating upload to %s" % (addsuite))
2319 self.pkg.changes.setdefault("propdistribution", {})
2320 self.pkg.changes["propdistribution"][addsuite] = 1
2324 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2326 ################################################################################
2327 def check_binary_against_db(self, filename, session):
2328 # Ensure version is sane
2329 q = session.query(BinAssociation)
2330 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2331 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2333 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2334 filename, self.pkg.files[filename]["version"], sourceful=False)
2336 # Check for any existing copies of the file
2337 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2338 q = q.filter_by(version=self.pkg.files[filename]["version"])
2339 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2342 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2344 ################################################################################
2346 def check_source_against_db(self, filename, session):
2349 source = self.pkg.dsc.get("source")
2350 version = self.pkg.dsc.get("version")
2352 # Ensure version is sane
2353 q = session.query(SrcAssociation)
2354 q = q.join(DBSource).filter(DBSource.source==source)
2356 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2357 filename, version, sourceful=True)
2359 ################################################################################
2360 def check_dsc_against_db(self, filename, session):
2363 @warning: NB: this function can remove entries from the 'files' index [if
2364 the orig tarball is a duplicate of the one in the archive]; if
2365 you're iterating over 'files' and call this function as part of
2366 the loop, be sure to add a check to the top of the loop to
2367 ensure you haven't just tried to dereference the deleted entry.
2372 self.pkg.orig_files = {} # XXX: do we need to clear it?
2373 orig_files = self.pkg.orig_files
2375 # Try and find all files mentioned in the .dsc. This has
2376 # to work harder to cope with the multiple possible
2377 # locations of an .orig.tar.gz.
2378 # The ordering on the select is needed to pick the newest orig
2379 # when it exists in multiple places.
2380 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2382 if self.pkg.files.has_key(dsc_name):
2383 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2384 actual_size = int(self.pkg.files[dsc_name]["size"])
2385 found = "%s in incoming" % (dsc_name)
2387 # Check the file does not already exist in the archive
2388 ql = get_poolfile_like_name(dsc_name, session)
2390 # Strip out anything whose filename doesn't end with dsc_name
2392 if not i.filename.endswith(dsc_name):
2395 # "[dak] has not broken them. [dak] has fixed a
2396 # brokenness. Your crappy hack exploited a bug in
2399 # "(Come on! I thought it was always obvious that
2400 # one just doesn't release different files with
2401 # the same name and version.)"
2402 # -- ajk@ on d-devel@l.d.o
2405 # Ignore exact matches for .orig.tar.gz
2407 if re_is_orig_source.match(dsc_name):
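# re_is_orig_source is assumed to match upstream tarball names such as
# "foo_1.2.orig.tar.gz", including component tarballs like
# "foo_1.2.orig-doc.tar.gz".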
2409 if self.pkg.files.has_key(dsc_name) and \
2410 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2411 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2412 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2413 # TODO: Don't delete the entry, just mark it as not needed
2414 # This would fix the stupidity of changing something we often iterate over
2415 # whilst we're doing it
2416 del self.pkg.files[dsc_name]
2417 if not orig_files.has_key(dsc_name):
2418 orig_files[dsc_name] = {}
2419 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2423 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2425 elif re_is_orig_source.match(dsc_name):
2427 ql = get_poolfile_like_name(dsc_name, session)
2429 # Strip out anything whose filename doesn't end with dsc_name
2430 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2432 if not i.filename.endswith(dsc_name):
2436 # Unfortunately, we may get more than one match here if,
2437 # for example, the package was in potato but had an -sa
2438 # upload in woody. So we need to choose the right one.
2440 # default to something sane in case we don't match any or have only one
2445 old_file = os.path.join(i.location.path, i.filename)
2446 old_file_fh = utils.open_file(old_file)
2447 actual_md5 = apt_pkg.md5sum(old_file_fh)
2449 actual_size = os.stat(old_file)[stat.ST_SIZE]
2450 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2453 old_file = os.path.join(x.location.path, x.filename)
2454 old_file_fh = utils.open_file(old_file)
2455 actual_md5 = apt_pkg.md5sum(old_file_fh)
2457 actual_size = os.stat(old_file)[stat.ST_SIZE]
2459 suite_type = x.location.archive_type
2460 # need this for updating dsc_files in install()
2461 dsc_entry["files id"] = x.file_id
2462 # See install() in process-accepted...
2463 if not orig_files.has_key(dsc_name):
2464 orig_files[dsc_name] = {}
2465 orig_files[dsc_name]["id"] = x.file_id
2466 orig_files[dsc_name]["path"] = old_file
2467 orig_files[dsc_name]["location"] = x.location.location_id
2469 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2470 # Not there? Check the queue directories...
2471 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2472 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2474 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2475 if os.path.exists(in_otherdir):
2476 in_otherdir_fh = utils.open_file(in_otherdir)
2477 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2478 in_otherdir_fh.close()
2479 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2481 if not orig_files.has_key(dsc_name):
2482 orig_files[dsc_name] = {}
2483 orig_files[dsc_name]["path"] = in_otherdir
2486 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2489 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2491 if actual_md5 != dsc_entry["md5sum"]:
2492 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2493 if actual_size != int(dsc_entry["size"]):
2494 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2496 ################################################################################
2497 # This is used by process-new and process-holding to recheck a changes file
2498 # at the time we're running. It mainly wraps various other internal functions
2499 # and is similar to accepted_checks - these should probably be tidied up
2501 def recheck(self, session):
2503 for f in self.pkg.files.keys():
2504 # The .orig.tar.gz can disappear out from under us if it's a
2505 # duplicate of one in the archive.
2506 if not self.pkg.files.has_key(f):
2509 entry = self.pkg.files[f]
2511 # Check that the source still exists
2512 if entry["type"] == "deb":
2513 source_version = entry["source version"]
2514 source_package = entry["source package"]
2515 if not self.pkg.changes["architecture"].has_key("source") \
2516 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2517 source_epochless_version = re_no_epoch.sub('', source_version)
2518 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2520 for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
2521 if cnf.has_key("Dir::Queue::%s" % (q)):
2522 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2525 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2527 # Version and file overwrite checks
2528 if entry["type"] == "deb":
2529 self.check_binary_against_db(f, session)
2530 elif entry["type"] == "dsc":
2531 self.check_source_against_db(f, session)
2532 self.check_dsc_against_db(f, session)
2534 ################################################################################
2535 def accepted_checks(self, overwrite_checks, session):
2536 # Recheck anything that relies on the database, since that's not
2537 # frozen between accept and our run time when called from p-a.
2539 # overwrite_checks is set to False when installing to stable/oldstable
2544 # Find the .dsc (again)
2546 for f in self.pkg.files.keys():
2547 if self.pkg.files[f]["type"] == "dsc":
2550 for checkfile in self.pkg.files.keys():
2551 # The .orig.tar.gz can disappear out from under us if it's a
2552 # duplicate of one in the archive.
2553 if not self.pkg.files.has_key(checkfile):
2556 entry = self.pkg.files[checkfile]
2558 # Check that the source still exists
2559 if entry["type"] == "deb":
2560 source_version = entry["source version"]
2561 source_package = entry["source package"]
2562 if not self.pkg.changes["architecture"].has_key("source") \
2563 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2564 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2566 # Version and file overwrite checks
2567 if overwrite_checks:
2568 if entry["type"] == "deb":
2569 self.check_binary_against_db(checkfile, session)
2570 elif entry["type"] == "dsc":
2571 self.check_source_against_db(checkfile, session)
2572 self.check_dsc_against_db(dsc_filename, session)
2574 # propagate in the case it is in the override tables:
2575 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2576 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2577 propogate[suite] = 1
2579 nopropogate[suite] = 1
2581 for suite in propogate.keys():
2582 if suite in nopropogate:
2584 self.pkg.changes["distribution"][suite] = 1
2586 for checkfile in self.pkg.files.keys():
2587 # Check the package is still in the override tables
2588 for suite in self.pkg.changes["distribution"].keys():
2589 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2590 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2592 ################################################################################
2593 # This is not really a reject, but an unaccept, but since a) the code for
2594 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2595 # extremely rare, for now we'll go with whining at our admin folks...
2597 def do_unaccept(self):
2601 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2602 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2603 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2604 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2605 if cnf.has_key("Dinstall::Bcc"):
2606 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2608 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2610 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2612 # Write the rejection email out as the <foo>.reason file
2613 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2614 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2616 # If we fail here someone is probably trying to exploit the race
2617 # so let's just raise an exception ...
2618 if os.path.exists(reject_filename):
2619 os.unlink(reject_filename)
2621 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2622 os.write(fd, reject_mail_message)
2625 utils.send_mail(reject_mail_message)
2627 del self.Subst["__REJECTOR_ADDRESS__"]
2628 del self.Subst["__REJECT_MESSAGE__"]
2629 del self.Subst["__CC__"]
2631 ################################################################################
2632 # If any file of an upload has a recent mtime then chances are good
2633 # the file is still being uploaded.
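# e.g. with Dinstall::SkipTime set to 300, any file modified within the
# last five minutes makes the upload "too new" (the value is site config).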
2635 def upload_too_new(self):
2638 # Move back to the original directory to get accurate time stamps
2640 os.chdir(self.pkg.directory)
2641 file_list = self.pkg.files.keys()
2642 file_list.extend(self.pkg.dsc_files.keys())
2643 file_list.append(self.pkg.changes_file)
2646 last_modified = time.time()-os.path.getmtime(f)
2647 if last_modified < int(cnf["Dinstall::SkipTime"]):