5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
52 from summarystats import SummaryStats
53 from utils import parse_changes, check_dsc_files
54 from textutils import fix_maintainer
55 from binary import Binary
57 ###############################################################################
59 def get_type(f, session):
# Map a file entry from a .changes into dak's override file type
# ("deb", "udeb", "dsc", ...) and validate it against the override_type
# table.  Invalid types abort dak outright via utils.fubar().
# NOTE(review): this chunk is elided -- the docstring delimiters, the
# source-extension branch body and the return line are not visible here.
61 Get the file type of C{f}
64 @param f: file entry from Changes object
66 @type session: SQLA Session
67 @param session: SQL Alchemy session object
# An explicit "dbtype" (set by earlier binary checks) wins over the
# declared "type" field from the .changes entry.
74 if f.has_key("dbtype"):
75 file_type = f["dbtype"]
76 elif re_source_ext.match(f["type"]):
# Reached when the type is neither a known dbtype nor a recognised
# source extension; fubar() prints the message and exits.
79 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
81 # Validate the override type
82 type_id = get_override_type(file_type, session)
# A type missing from the override_type table is likewise fatal.
84 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
88 ################################################################################
90 # Determine what parts in a .changes are NEW
92 def determine_new(changes, files, warn=1):
94 Determine what parts in a C{changes} file are NEW.
96 @type changes: Upload.Pkg.changes dict
97 @param changes: Changes dictionary
99 @type files: Upload.Pkg.files dict
100 @param files: Files dictionary
103 @param warn: Warn if overrides are added for (old)stable
106 @return: dictionary of NEW components.
# NOTE(review): elided chunk -- the `new` dict initialisation, the
# byhand/dsc branch bodies and the final return are not visible here.
111 session = DBConn().session()
113 # Build up a list of potentially new things
114 for name, f in files.items():
115 # Skip byhand elements
116 if f["type"] == "byhand":
119 priority = f["priority"]
120 section = f["section"]
121 file_type = get_type(f, session)
122 component = f["component"]
124 if file_type == "dsc":
# First file seen for this package: record its override metadata.
127 if not new.has_key(pkg):
129 new[pkg]["priority"] = priority
130 new[pkg]["section"] = section
131 new[pkg]["type"] = file_type
132 new[pkg]["component"] = component
133 new[pkg]["files"] = []
135 old_type = new[pkg]["type"]
136 if old_type != file_type:
137 # source gets trumped by deb or udeb
138 if old_type == "dsc":
139 new[pkg]["priority"] = priority
140 new[pkg]["section"] = section
141 new[pkg]["type"] = file_type
142 new[pkg]["component"] = component
144 new[pkg]["files"].append(name)
146 if f.has_key("othercomponents"):
147 new[pkg]["othercomponents"] = f["othercomponents"]
# A package that already has an override in a target suite is not NEW:
# clear the "new" marker on its files.
149 for suite in changes["suite"].keys():
150 for pkg in new.keys():
151 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
153 for file_entry in new[pkg]["files"]:
154 if files[file_entry].has_key("new"):
155 del files[file_entry]["new"]
# Warn the operator when overrides would be added for (old)stable,
# which normally only change via point releases.  The `warn` parameter
# presumably gates this -- the guarding line is elided; confirm.
159 for s in ['stable', 'oldstable']:
160 if changes["suite"].has_key(s):
161 print "WARNING: overrides will be added for %s!" % s
162 for pkg in new.keys():
163 if new[pkg].has_key("othercomponents"):
164 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
170 ################################################################################
172 def check_valid(new):
174 Check if section and priority for NEW packages exist in database.
175 Additionally does sanity checks:
176 - debian-installer packages have to be udeb (or source)
177 - non debian-installer packages can not be udeb
178 - source priority can only be assigned to dsc file types
181 @param new: Dict of new packages with their section, priority and type.
# Mutates `new` in place: a "section id"/"priority id" of -1 marks the
# entry as invalid for later NEW processing.
184 for pkg in new.keys():
185 section_name = new[pkg]["section"]
186 priority_name = new[pkg]["priority"]
187 file_type = new[pkg]["type"]
# Look up the section; the None-check line between these is elided.
189 section = get_section(section_name)
191 new[pkg]["section id"] = -1
193 new[pkg]["section id"] = section.section_id
195 priority = get_priority(priority_name)
197 new[pkg]["priority id"] = -1
199 new[pkg]["priority id"] = priority.priority_id
# "debian-installer" anywhere in the section name marks a d-i package.
202 di = section_name.find("debian-installer") != -1
204 # If d-i, we must be udeb and vice-versa
205 if (di and file_type not in ("udeb", "dsc")) or \
206 (not di and file_type == "udeb"):
207 new[pkg]["section id"] = -1
# NOTE(review): `priority` here is whatever get_priority() returned (a
# database object or None), not a string, so comparing it to "source"
# looks wrong -- this probably should compare `priority_name`.  Confirm
# against the rest of the codebase before relying on this check.
209 # If dsc we need to be source and vice-versa
210 if (priority == "source" and file_type != "dsc") or \
211 (priority != "source" and file_type == "dsc"):
212 new[pkg]["priority id"] = -1
214 ###############################################################################
216 def check_status(files):
# Classify the upload's files as byhand and/or NEW.  The branch bodies
# and the return statement are elided in this chunk, so the exact
# result shape cannot be stated here -- presumably flags for each
# category; confirm against callers.
218 for f in files.keys():
219 if files[f]["type"] == "byhand":
221 elif files[f].has_key("new"):
227 # Used by Upload.check_timestamps
228 class TarTime(object):
# Timestamp collector used by Upload.check_timestamps (see comment
# above): records tar members whose mtime falls outside the
# [past_cutoff, future_cutoff] window.
229 def __init__(self, future_cutoff, past_cutoff):
# Epoch-second bounds for acceptable member timestamps.
231 self.future_cutoff = future_cutoff
232 self.past_cutoff = past_cutoff
# Name -> mtime maps of offending members.
235 self.future_files = {}
236 self.ancient_files = {}
# Per-member visitor; the signature matches a tar extraction callback.
# Only Name and MTime are used -- the other arguments are ignored.
238 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
239 if MTime > self.future_cutoff:
240 self.future_files[Name] = MTime
241 if MTime < self.past_cutoff:
242 self.ancient_files[Name] = MTime
244 ###############################################################################
246 class Upload(object):
248 Everything that has to do with an upload processed.
256 ###########################################################################
259 """ Reset a number of internal variables."""
261 # Initialize the substitution template map
264 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
265 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
266 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
267 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
275 def package_info(self):
277 Format various messages from this Upload to send to the maintainer.
# Each non-empty category becomes a titled section in the message.
# The `msgs` initialisation and the final return are elided here.
281 ('Reject Reasons', self.rejects),
282 ('Warnings', self.warnings),
283 ('Notes', self.notes),
287 for title, messages in msgs:
289 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
293 ###########################################################################
294 def update_subst(self):
295 """ Set up the per-package template substitution mappings """
299 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
300 if not self.pkg.changes.has_key("architecture") or not \
301 isinstance(self.pkg.changes["architecture"], DictType):
302 self.pkg.changes["architecture"] = { "Unknown" : "" }
304 # and maintainer2047 may not exist.
305 if not self.pkg.changes.has_key("maintainer2047"):
306 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
# Basic per-upload substitutions used by the mail templates.
308 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
309 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
310 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
312 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
313 if self.pkg.changes["architecture"].has_key("source") and \
314 self.pkg.changes["changedby822"] != "" and \
315 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
317 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
# Mail goes to both the uploader and the maintainer in this case.
318 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
319 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
321 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
322 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
323 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
# Sponsored uploads also notify the sponsor.
325 if "sponsoremail" in self.pkg.changes:
326 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
# Bcc the package-tracking service if one is configured.
328 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
329 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
331 # Apply any global override of the Maintainer field
332 if cnf.get("Dinstall::OverrideMaintainer"):
333 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
334 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
336 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
337 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
338 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
340 ###########################################################################
341 def load_changes(self, filename):
# Parse a .changes file into self.pkg and run the basic sanity checks;
# problems are accumulated in self.rejects rather than raised.
# NOTE(review): elided chunk -- the try:/else: lines around the parse
# calls and several return statements are not visible here.
344 @rvalue: whether the changes file was valid or not. We may want to
345 reject even if this is True (see what gets put in self.rejects).
346 This is simply to prevent us even trying things later which will
347 fail because we couldn't properly parse the file.
350 self.pkg.changes_file = filename
352 # Parse the .changes field into a dictionary
354 self.pkg.changes.update(parse_changes(filename))
355 except CantOpenError:
356 self.rejects.append("%s: can't read file." % (filename))
358 except ParseChangesError, line:
359 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
361 except ChangesUnicodeError:
362 self.rejects.append("%s: changes file not proper utf-8" % (filename))
365 # Parse the Files field from the .changes into another dictionary
367 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
368 except ParseChangesError, line:
369 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
371 except UnknownFormatError, format:
372 self.rejects.append("%s: unknown format '%s'." % (filename, format))
375 # Check for mandatory fields
376 for i in ("distribution", "source", "binary", "architecture",
377 "version", "maintainer", "files", "changes", "description"):
378 if not self.pkg.changes.has_key(i):
379 # Avoid undefined errors later
380 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
383 # Strip a source version in brackets from the source field
384 if re_strip_srcver.search(self.pkg.changes["source"]):
385 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
387 # Ensure the source field is a valid package name.
388 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
389 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
391 # Split multi-value fields into a lower-level dictionary
392 for i in ("architecture", "distribution", "binary", "closes"):
393 o = self.pkg.changes.get(i, "")
395 del self.pkg.changes[i]
397 self.pkg.changes[i] = {}
400 self.pkg.changes[i][j] = 1
402 # Fix the Maintainer: field to be RFC822/2047 compatible
404 (self.pkg.changes["maintainer822"],
405 self.pkg.changes["maintainer2047"],
406 self.pkg.changes["maintainername"],
407 self.pkg.changes["maintaineremail"]) = \
408 fix_maintainer (self.pkg.changes["maintainer"])
409 except ParseMaintError, msg:
# NOTE(review): `changes` is not defined in this method -- this reject
# message almost certainly means self.pkg.changes["maintainer"] and
# would raise NameError if reached.  Confirm and fix upstream.
410 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
411 % (filename, changes["maintainer"], msg))
413 # ...likewise for the Changed-By: field if it exists.
415 (self.pkg.changes["changedby822"],
416 self.pkg.changes["changedby2047"],
417 self.pkg.changes["changedbyname"],
418 self.pkg.changes["changedbyemail"]) = \
419 fix_maintainer (self.pkg.changes.get("changed-by", ""))
420 except ParseMaintError, msg:
# On parse failure the changedby* fields are blanked so later code can
# rely on them existing.
421 self.pkg.changes["changedby822"] = ""
422 self.pkg.changes["changedby2047"] = ""
423 self.pkg.changes["changedbyname"] = ""
424 self.pkg.changes["changedbyemail"] = ""
# NOTE(review): same undefined `changes` problem as above.
426 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
427 % (filename, changes["changed-by"], msg))
429 # Ensure all the values in Closes: are numbers
430 if self.pkg.changes.has_key("closes"):
431 for i in self.pkg.changes["closes"].keys():
432 if re_isanum.match (i) == None:
433 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
435 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
436 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
437 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
439 # Check there isn't already a changes file of the same name in one
440 # of the queue directories.
441 base_filename = os.path.basename(filename)
# NOTE(review): uses global `Cnf` while the rest of this method's
# neighbourhood uses `cnf` -- verify both names are in scope.
442 for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
443 if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
444 self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
446 # Check the .changes is non-empty
447 if not self.pkg.files:
448 self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
451 # Changes was syntactically valid even if we'll reject
454 ###########################################################################
456 def check_distributions(self):
457 "Check and map the Distribution field"
# Applies the configured SuiteMappings to self.pkg.changes
# ["distribution"], then validates that at least one known target
# suite remains.  Problems/notes accumulate in self.rejects /
# self.notes / self.warnings.  (Some lines, e.g. the split of `m`
# into args/mtype, are elided in this chunk.)
461 # Handle suite mappings
462 for m in Cnf.ValueList("SuiteMappings"):
465 if mtype == "map" or mtype == "silent-map":
# Unconditionally rename suite `source` to `dest`.
466 (source, dest) = args[1:3]
467 if self.pkg.changes["distribution"].has_key(source):
468 del self.pkg.changes["distribution"][source]
469 self.pkg.changes["distribution"][dest] = 1
470 if mtype != "silent-map":
471 self.notes.append("Mapping %s to %s." % (source, dest))
472 if self.pkg.changes.has_key("distribution-version"):
473 if self.pkg.changes["distribution-version"].has_key(source):
474 self.pkg.changes["distribution-version"][source]=dest
475 elif mtype == "map-unreleased":
# Map only when the upload contains an architecture the source
# suite does not carry.
476 (source, dest) = args[1:3]
477 if self.pkg.changes["distribution"].has_key(source):
478 for arch in self.pkg.changes["architecture"].keys():
479 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
480 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
481 del self.pkg.changes["distribution"][source]
482 self.pkg.changes["distribution"][dest] = 1
484 elif mtype == "ignore":
# Silently drop the suite from the target list (with a warning).
486 if self.pkg.changes["distribution"].has_key(suite):
487 del self.pkg.changes["distribution"][suite]
488 self.warnings.append("Ignoring %s as a target suite." % (suite))
489 elif mtype == "reject":
491 if self.pkg.changes["distribution"].has_key(suite):
492 self.rejects.append("Uploads to %s are not accepted." % (suite))
493 elif mtype == "propup-version":
494 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
496 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
497 if self.pkg.changes["distribution"].has_key(args[1]):
498 self.pkg.changes.setdefault("distribution-version", {})
499 for suite in args[2:]:
500 self.pkg.changes["distribution-version"][suite] = suite
502 # Ensure there is (still) a target distribution
503 if len(self.pkg.changes["distribution"].keys()) < 1:
504 self.rejects.append("No valid distribution remaining.")
506 # Ensure target distributions exist
507 for suite in self.pkg.changes["distribution"].keys():
508 if not Cnf.has_key("Suite::%s" % (suite)):
509 self.rejects.append("Unknown distribution `%s'." % (suite))
511 ###########################################################################
513 def binary_file_checks(self, f, session):
# Validate a single .deb/.udeb `f` from the upload: extract and check
# its control file, cross-check it against the .changes metadata and
# the filename, and verify its source exists.  Problems accumulate in
# self.rejects / self.warnings.  NOTE(review): elided chunk -- the
# try:/except: lines around the control extraction, several `return`s
# and None-checks are not visible here.
515 entry = self.pkg.files[f]
517 # Extract package control information
518 deb_file = utils.open_file(f)
520 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
522 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
524 # Can't continue, none of the checks on control would work.
527 # Check for mandantory "Description:"
# Re-extracts the control file just to probe the Description field;
# a missing key is caught by the (elided) surrounding except.
530 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
532 self.rejects.append("%s: Missing Description in binary package" % (f))
537 # Check for mandatory fields
538 for field in [ "Package", "Architecture", "Version" ]:
539 if control.Find(field) == None:
541 self.rejects.append("%s: No %s field in control." % (f, field))
544 # Ensure the package name matches the one give in the .changes
545 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
546 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
548 # Validate the package field
549 package = control.Find("Package")
550 if not re_valid_pkg_name.match(package):
551 self.rejects.append("%s: invalid package name '%s'." % (f, package))
553 # Validate the version field
554 version = control.Find("Version")
555 if not re_valid_version.match(version):
556 self.rejects.append("%s: invalid version number '%s'." % (f, version))
558 # Ensure the architecture of the .deb is one we know about.
559 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
560 architecture = control.Find("Architecture")
# Only the first target suite is checked here, plus the default suite.
561 upload_suite = self.pkg.changes["distribution"].keys()[0]
563 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
564 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
565 self.rejects.append("Unknown architecture '%s'." % (architecture))
567 # Ensure the architecture of the .deb is one of the ones
568 # listed in the .changes.
569 if not self.pkg.changes["architecture"].has_key(architecture):
570 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
572 # Sanity-check the Depends field
573 depends = control.Find("Depends")
# Fires when Depends is present but empty (the guard line is elided).
575 self.rejects.append("%s: Depends field is empty." % (f))
577 # Sanity-check the Provides field
578 provides = control.Find("Provides")
# Strip whitespace, then validate each provided package name.
580 provide = re_spacestrip.sub('', provides)
582 self.rejects.append("%s: Provides field is empty." % (f))
583 prov_list = provide.split(",")
584 for prov in prov_list:
585 if not re_valid_pkg_name.match(prov):
586 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
588 # Check the section & priority match those given in the .changes (non-fatal)
589 if control.Find("Section") and entry["section"] != "" \
590 and entry["section"] != control.Find("Section"):
591 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
592 (f, control.Find("Section", ""), entry["section"]))
593 if control.Find("Priority") and entry["priority"] != "" \
594 and entry["priority"] != control.Find("Priority"):
595 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
596 (f, control.Find("Priority", ""), entry["priority"]))
# Cache the control metadata on the file entry for later stages.
598 entry["package"] = package
599 entry["architecture"] = architecture
600 entry["version"] = version
601 entry["maintainer"] = control.Find("Maintainer", "")
603 if f.endswith(".udeb"):
604 self.pkg.files[f]["dbtype"] = "udeb"
605 elif f.endswith(".deb"):
606 self.pkg.files[f]["dbtype"] = "deb"
608 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
610 entry["source"] = control.Find("Source", entry["package"])
612 # Get the source version
613 source = entry["source"]
# A "(version)" suffix in Source overrides the binary's own version.
616 if source.find("(") != -1:
617 m = re_extract_src_version.match(source)
619 source_version = m.group(2)
621 if not source_version:
622 source_version = self.pkg.files[f]["version"]
624 entry["source package"] = source
625 entry["source version"] = source_version
627 # Ensure the filename matches the contents of the .deb
628 m = re_isadeb.match(f)
631 file_package = m.group(1)
632 if entry["package"] != file_package:
633 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
634 (f, file_package, entry["dbtype"], entry["package"]))
# The filename carries the epochless version, so compare without epoch.
635 epochless_version = re_no_epoch.sub('', control.Find("Version"))
638 file_version = m.group(2)
639 if epochless_version != file_version:
640 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
641 (f, file_version, entry["dbtype"], epochless_version))
644 file_architecture = m.group(3)
645 if entry["architecture"] != file_architecture:
646 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
647 (f, file_architecture, entry["dbtype"], entry["architecture"]))
649 # Check for existent source
650 source_version = entry["source version"]
651 source_package = entry["source package"]
652 if self.pkg.changes["architecture"].has_key("source"):
653 if source_version != self.pkg.changes["version"]:
654 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
655 (source_version, f, self.pkg.changes["version"]))
657 # Check in the SQL database
658 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
659 # Check in one of the other directories
660 source_epochless_version = re_no_epoch.sub('', source_version)
661 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
662 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
664 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
# Fall back to scanning the other queue directories for the .dsc.
667 dsc_file_exists = False
668 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
669 if cnf.has_key("Dir::Queue::%s" % (myq)):
670 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
671 dsc_file_exists = True
674 if not dsc_file_exists:
675 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
677 # Check the version and for file overwrites
678 self.check_binary_against_db(f, session)
680 # Temporarily disable contents generation until we change the table storage layout
683 #if len(b.rejects) > 0:
684 # for j in b.rejects:
685 # self.rejects.append(j)
687 def source_file_checks(self, f, session):
# Validate a single source file (.dsc/.diff/.tar.*) against the
# .changes metadata; problems accumulate in self.rejects.
# NOTE(review): the non-match bail-out after re_issource and some
# blank/else lines are elided in this chunk.
688 entry = self.pkg.files[f]
# Split the filename into package / version / source-file type.
690 m = re_issource.match(f)
694 entry["package"] = m.group(1)
695 entry["version"] = m.group(2)
696 entry["type"] = m.group(3)
698 # Ensure the source package name matches the Source filed in the .changes
699 if self.pkg.changes["source"] != entry["package"]:
700 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
702 # Ensure the source version matches the version in the .changes file
# Orig tarballs carry the upstream (revisionless) version.
703 if re_is_orig_source.match(f):
704 changes_version = self.pkg.changes["chopversion2"]
706 changes_version = self.pkg.changes["chopversion"]
708 if changes_version != entry["version"]:
709 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
711 # Ensure the .changes lists source in the Architecture field
712 if not self.pkg.changes["architecture"].has_key("source"):
713 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
715 # Check the signature of a .dsc file
716 if entry["type"] == "dsc":
717 # check_signature returns either:
718 # (None, [list, of, rejects]) or (signature, [])
719 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
721 self.rejects.append(j)
# All source files count as architecture "source".
723 entry["architecture"] = "source"
725 def per_suite_file_checks(self, f, suite, session):
# Per-target-suite validation of one file: component mapping and
# validity, NEW detection, priority sanity, pool location and
# existing-poolfile consistency.  Problems accumulate in self.rejects.
# NOTE(review): elided chunk -- several return/continue lines and the
# `if len(missing) ...` style guards are not visible here.
727 entry = self.pkg.files[f]
728 archive = utils.where_am_i()
# Byhand files skip all of these checks.
731 if entry.has_key("byhand"):
734 # Check we have fields we need to do these checks
736 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
737 if not entry.has_key(m):
738 self.rejects.append("file '%s' does not have field %s set" % (f, m))
744 # Handle component mappings
745 for m in cnf.ValueList("ComponentMappings"):
746 (source, dest) = m.split()
747 if entry["component"] == source:
748 entry["original component"] = source
749 entry["component"] = dest
751 # Ensure the component is valid for the target suite
# NOTE(review): "Suite:%s::Components" uses a single colon after
# "Suite" while the ValueList below uses "Suite::%s::Components" --
# the has_key probably never matches, disabling this check.  Confirm.
752 if cnf.has_key("Suite:%s::Components" % (suite)) and \
753 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
754 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
757 # Validate the component
758 if not get_component(entry["component"], session):
# NOTE(review): bare `component` is not defined in this method --
# this reject would raise NameError; entry["component"] was likely
# intended.
759 self.rejects.append("file '%s' has unknown component '%s'." % (f, component))
762 # See if the package is NEW
763 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
766 # Validate the priority
767 if entry["priority"].find('/') != -1:
768 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
770 # Determine the location
771 location = cnf["Dir::Pool"]
772 l = get_location(location, entry["component"], archive, session)
# NOTE(review): same undefined `component` problem as above.
774 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
775 entry["location id"] = -1
777 entry["location id"] = l.location_id
779 # Check the md5sum & size against existing files (if any)
780 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
782 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
783 entry["size"], entry["md5sum"], entry["location id"])
# found is None on ambiguity, False on mismatch with an existing copy.
786 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
787 elif found is False and poolfile is not None:
788 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
791 entry["files id"] = None
793 entry["files id"] = poolfile.file_id
795 # Check for packages that have moved from one component to another
796 entry['suite'] = suite
797 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
799 entry["othercomponents"] = res.fetchone()[0]
801 def check_files(self, action=True):
# Top-level per-file validation driver: copies the upload into the
# holding area (when `action` is true), checks for name clashes with
# queue/Copy* directories, classifies each file (byhand / deb /
# source) and dispatches to the specific checkers, then enforces the
# source-presence rules.  NOTE(review): elided chunk -- the holding
# setup, several loop/guard lines and `has_binaries`/`has_source`
# bookkeeping are not visible here.
802 archive = utils.where_am_i()
803 file_keys = self.pkg.files.keys()
807 # XXX: As far as I can tell, this can no longer happen - see
808 # comments by AJ in old revisions - mhy
809 # if reprocess is 2 we've already done this and we're checking
810 # things again for the new .orig.tar.gz.
811 # [Yes, I'm fully aware of how disgusting this is]
812 if action and self.reprocess < 2:
814 os.chdir(self.pkg.directory)
816 ret = holding.copy_to_holding(f)
818 # XXX: Should we bail out here or try and continue?
819 self.rejects.append(ret)
823 # Check there isn't already a .changes or .dak file of the same name in
824 # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
825 # [NB: this check must be done post-suite mapping]
826 base_filename = os.path.basename(self.pkg.changes_file)
827 dot_dak_filename = base_filename[:-8] + ".dak"
829 for suite in self.pkg.changes["distribution"].keys():
830 copychanges = "Suite::%s::CopyChanges" % (suite)
831 if cnf.has_key(copychanges) and \
832 os.path.exists(os.path.join(cnf[copychanges], base_filename)):
833 self.rejects.append("%s: a file with this name already exists in %s" \
834 % (base_filename, cnf[copychanges]))
836 copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
837 if cnf.has_key(copy_dot_dak) and \
838 os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
# NOTE(review): message uses `Cnf[...]` while the surrounding code
# uses `cnf[...]` -- verify both names are in scope.
839 self.rejects.append("%s: a file with this name already exists in %s" \
840 % (dot_dak_filename, Cnf[copy_dot_dak]))
846 session = DBConn().session()
848 for f, entry in self.pkg.files.items():
849 # Ensure the file does not already exist in one of the accepted directories
850 for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
851 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
852 if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
853 self.rejects.append("%s file already exists in the %s directory." % (f, d))
855 if not re_taint_free.match(f):
856 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
858 # Check the file is readable
859 if os.access(f, os.R_OK) == 0:
860 # When running in -n, copy_to_holding() won't have
861 # generated the reject_message, so we need to.
863 if os.path.exists(f):
864 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
866 self.rejects.append("Can't read `%s'. [file not found]" % (f))
867 entry["type"] = "unreadable"
870 # If it's byhand skip remaining checks
871 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
873 entry["type"] = "byhand"
875 # Checks for a binary package...
876 elif re_isadeb.match(f):
878 entry["type"] = "deb"
880 # This routine appends to self.rejects/warnings as appropriate
881 self.binary_file_checks(f, session)
883 # Checks for a source package...
884 elif re_issource.match(f):
887 # This routine appends to self.rejects/warnings as appropriate
888 self.source_file_checks(f, session)
890 # Not a binary or source package? Assume byhand...
893 entry["type"] = "byhand"
895 # Per-suite file checks
896 entry["oldfiles"] = {}
897 for suite in self.pkg.changes["distribution"].keys():
898 self.per_suite_file_checks(f, suite, session)
902 # If the .changes file says it has source, it must have source.
903 if self.pkg.changes["architecture"].has_key("source"):
905 self.rejects.append("no source found and Architecture line in changes mention source.")
907 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
908 self.rejects.append("source only uploads are not supported.")
910 ###########################################################################
911 def check_dsc(self, action=True, session=None):
912 """Returns bool indicating whether or not the source changes are valid"""
# NOTE(review): this excerpt is elided -- gaps in the embedded line numbers
# (914->919, 932->934, ...) mean control-flow lines (try:, return, else:)
# are not visible here; comments below describe only the visible statements.
913 # Ensure there is source to check
914 if not self.pkg.changes["architecture"].has_key("source"):
# Locate the (single) .dsc entry; multiple .dsc files are a hard reject.
919 for f, entry in self.pkg.files.items():
920 if entry["type"] == "dsc":
922 self.rejects.append("can not process a .changes file with multiple .dsc's.")
927 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
929 self.rejects.append("source uploads must contain a dsc file")
# Parse failures become reject messages rather than propagated exceptions.
932 # Parse the .dsc file
934 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
935 except CantOpenError:
936 # if not -n copy_to_holding() will have done this for us...
938 self.rejects.append("%s: can't read file." % (dsc_filename))
939 except ParseChangesError, line:
940 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
941 except InvalidDscError, line:
942 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
943 except ChangesUnicodeError:
944 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
946 # Build up the file list of files mentioned by the .dsc
948 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
949 except NoFilesFieldError:
950 self.rejects.append("%s: no Files: field." % (dsc_filename))
952 except UnknownFormatError, format:
953 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
955 except ParseChangesError, line:
956 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
959 # Enforce mandatory fields
960 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
961 if not self.pkg.dsc.has_key(i):
962 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
965 # Validate the source and version fields
966 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
967 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
968 if not re_valid_version.match(self.pkg.dsc["version"]):
969 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
971 # Only a limited list of source formats are allowed in each suite
972 for dist in self.pkg.changes["distribution"].keys():
973 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
974 if self.pkg.dsc["format"] not in allowed:
975 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
977 # Validate the Maintainer field
979 # We ignore the return value
980 fix_maintainer(self.pkg.dsc["maintainer"])
981 except ParseMaintError, msg:
982 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
983 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
985 # Validate the build-depends field(s)
986 for field_name in [ "build-depends", "build-depends-indep" ]:
987 field = self.pkg.dsc.get(field_name)
989 # Have apt try to parse them...
991 apt_pkg.ParseSrcDepends(field)
993 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
995 # Ensure the version number in the .dsc matches the version number in the .changes
996 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
997 changes_version = self.pkg.files[dsc_filename]["version"]
999 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1000 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1002 # Ensure the Files field contain only what's expected
1003 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
# NOTE(review): a fresh SQLA session is opened for the DB cross-checks;
# it is presumably closed in elided code below -- TODO confirm.
1005 # Ensure source is newer than existing source in target suites
1006 session = DBConn().session()
1007 self.check_source_against_db(dsc_filename, session)
1008 self.check_dsc_against_db(dsc_filename, session)
1013 ###########################################################################
1015 def get_changelog_versions(self, source_dir):
1016 """Extracts a the source package and (optionally) grabs the
1017 version history out of debian/changelog for the BTS."""
# NOTE(review): excerpt is elided (gaps in embedded line numbers), so loop
# bodies, early returns and some assignments are not visible here.
1021 # Find the .dsc (again)
1023 for f in self.pkg.files.keys():
1024 if self.pkg.files[f]["type"] == "dsc":
1027 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1028 if not dsc_filename:
# Mirror the source files into the cwd (a temp dir, per the caller) via
# symlinks so dpkg-source can find everything it needs.
1031 # Create a symlink mirror of the source files in our temporary directory
1032 for f in self.pkg.files.keys():
1033 m = re_issource.match(f)
1035 src = os.path.join(source_dir, f)
1036 # If a file is missing for whatever reason, give up.
1037 if not os.path.exists(src):
1040 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1041 self.pkg.orig_files[f].has_key("path"):
1043 dest = os.path.join(os.getcwd(), f)
1044 os.symlink(src, dest)
1046 # If the orig files are not a part of the upload, create symlinks to the
1048 for orig_file in self.pkg.orig_files.keys():
1049 if not self.pkg.orig_files[orig_file].has_key("path"):
1051 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1052 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
# Unpack with dpkg-source; a non-zero exit becomes reject messages that
# include the captured command output.
1054 # Extract the source
1055 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1056 (result, output) = commands.getstatusoutput(cmd)
1058 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1059 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
# The changelog harvest below is only done when BTS version tracking is
# configured (Dir::Queue::BTSVersionTrack).
1062 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1065 # Get the upstream version
1066 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1067 if re_strip_revision.search(upstr_version):
1068 upstr_version = re_strip_revision.sub('', upstr_version)
1070 # Ensure the changelog file exists
1071 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1072 if not os.path.exists(changelog_filename):
1073 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
# Accumulate changelog lines matching re_changelog_versions into
# dsc["bts changelog"], used later for BTS version tracking.
1076 # Parse the changelog
1077 self.pkg.dsc["bts changelog"] = ""
1078 changelog_file = utils.open_file(changelog_filename)
1079 for line in changelog_file.readlines():
1080 m = re_changelog_versions.match(line)
1082 self.pkg.dsc["bts changelog"] += line
1083 changelog_file.close()
1085 # Check we found at least one revision in the changelog
1086 if not self.pkg.dsc["bts changelog"]:
1087 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1089 def check_source(self):
# Driver for source-tree checks: skip when there is no sourceful upload,
# when reprocess == 2, or when the orig files are missing; otherwise run
# get_changelog_versions() inside a temporary directory and clean it up.
# NOTE(review): excerpt is elided; the chdir dance and try/except framing
# around the cleanup are only partially visible.
1090 # XXX: I'm fairly sure reprocess == 2 can never happen
1091 # AJT disabled the is_incoming check years ago - mhy
1092 # We should probably scrap or rethink the whole reprocess thing
1094 # a) there's no source
1095 # or b) reprocess is 2 - we will do this check next time when orig
1096 # tarball is in 'files'
1097 # or c) the orig files are MIA
1098 if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
1099 or len(self.pkg.orig_files) == 0:
1102 tmpdir = utils.temp_dirname()
1104 # Move into the temporary directory
1108 # Get the changelog version history
1109 self.get_changelog_versions(cwd)
1111 # Move back and cleanup the temporary tree
1115 shutil.rmtree(tmpdir)
1117 if e.errno != errno.EACCES:
1119 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1121 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
# Fallback cleanup: dpkg-source can leave u-r/u-w directories behind, so
# force permissions with chmod -R and retry the rmtree.
1122 # We probably have u-r or u-w directories so chmod everything
1124 cmd = "chmod -R u+rwx %s" % (tmpdir)
1125 result = os.system(cmd)
1127 utils.fubar("'%s' failed with result %s." % (cmd, result))
1128 shutil.rmtree(tmpdir)
1129 except Exception, e:
1130 print "foobar2 (%s)" % e
1131 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1133 ###########################################################################
1134 def ensure_hashes(self):
# Validate/calculate checksum fields for both .changes and .dsc, appending
# any problems to self.rejects. NOTE(review): excerpt is elided (missing
# else:/blank lines per the embedded numbering gaps).
1135 # Make sure we recognise the format of the Files: field in the .changes
# format becomes a (major, minor) int tuple, defaulting to "0.0".
1136 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1137 if len(format) == 2:
1138 format = int(format[0]), int(format[1])
1140 format = int(float(format[0])), 0
1142 # We need to deal with the original changes blob, as the fields we need
1143 # might not be in the changes dict serialised into the .dak anymore.
1144 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1146 # Copy the checksums over to the current changes dict. This will keep
1147 # the existing modifications to it intact.
1148 for field in orig_changes:
1149 if field.startswith('checksums-'):
1150 self.pkg.changes[field] = orig_changes[field]
1152 # Check for unsupported hashes
1153 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1154 self.rejects.append(j)
1156 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1157 self.rejects.append(j)
1159 # We have to calculate the hash if we have an earlier changes version than
1160 # the hash appears in rather than require it exist in the changes file
1161 for hashname, hashfunc, version in utils.known_hashes:
1162 # TODO: Move _ensure_changes_hash into this class
1163 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1164 self.rejects.append(j)
# .dsc hashes only matter for sourceful uploads.
1165 if "source" in self.pkg.changes["architecture"]:
1166 # TODO: Move _ensure_dsc_hash into this class
1167 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1168 self.rejects.append(j)
1170 def check_hashes(self):
# Verify md5sums and sizes of all files listed in the .changes and .dsc,
# collecting failures into self.rejects, then delegate the remaining
# checksum fields to ensure_hashes().
1171 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1172 self.rejects.append(m)
1174 for m in utils.check_size(".changes", self.pkg.files):
1175 self.rejects.append(m)
1177 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1178 self.rejects.append(m)
1180 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1181 self.rejects.append(m)
1183 self.ensure_hashes()
1185 ###########################################################################
1187 def ensure_orig(self, target_dir='.', session=None):
1189 Ensures that all orig files mentioned in the changes file are present
1190 in target_dir. If they do not exist, they are symlinked into place.
1192 An list containing the symlinks that were created are returned (so they
# NOTE(review): excerpt is heavily elided -- the docstring close, the
# `symlinked = []` initialiser, continue/return statements and the pool
# lookup framing are not visible; comments describe visible code only.
1199 for filename, entry in self.pkg.dsc_files.iteritems():
1200 if not re_is_orig_source.match(filename):
1201 # File is not an orig; ignore
1204 if os.path.exists(filename):
1205 # File exists, no need to continue
# Closure: symlink `path` into target_dir only if its size and md5sum
# match what the .dsc entry declares.
1208 def symlink_if_valid(path):
1209 f = utils.open_file(path)
1210 md5sum = apt_pkg.md5sum(f)
1213 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1214 expected = (int(entry['size']), entry['md5sum'])
1216 if fingerprint != expected:
1219 dest = os.path.join(target_dir, filename)
1221 os.symlink(path, dest)
1222 symlinked.append(dest)
# First try the archive pool for a matching file ...
1228 session_ = DBConn().session()
1233 for poolfile in get_poolfile_like_name('/%s' % filename, session_):
1234 poolfile_path = os.path.join(
1235 poolfile.location.path, poolfile.filename
1238 if symlink_if_valid(poolfile_path):
# ... then fall back to the other configured queue directories.
1248 # Look in some other queues for the file
1249 queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
1250 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1252 for queue in queues:
1253 if not cnf.get('Dir::Queue::%s' % queue):
1256 queuefile_path = os.path.join(
1257 cnf['Dir::Queue::%s' % queue], filename
1260 if not os.path.exists(queuefile_path):
1261 # Does not exist in this queue
1264 if symlink_if_valid(queuefile_path):
1269 ###########################################################################
1271 def check_lintian(self):
# Run lintian against the .changes file using the configured tag file and
# turn fatal tags into reject messages. NOTE(review): excerpt is elided
# (missing returns, `tags` set construction, regex-group bindings, etc.).
1274 # Don't reject binary uploads
1275 if not self.pkg.changes['architecture'].has_key('source'):
1278 # Only check some distributions
1280 for dist in ('unstable', 'experimental'):
1281 if dist in self.pkg.changes['distribution']:
1288 tagfile = cnf.get("Dinstall::LintianTags")
1290 # We don't have a tagfile, so just don't do anything.
1293 # Parse the yaml file
1294 sourcefile = file(tagfile, 'r')
1295 sourcecontent = sourcefile.read()
# NOTE(review): yaml.load on the (ftpmaster-controlled) tags file; a YAML
# error aborts via fubar.
1298 lintiantags = yaml.load(sourcecontent)['lintian']
1299 except yaml.YAMLError, msg:
1300 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1303 # Try and find all orig mentioned in the .dsc
1304 symlinked = self.ensure_orig()
1306 # Now setup the input file for lintian. lintian wants "one tag per line" only,
1307 # so put it together like it. We put all types of tags in one file and then sort
1308 # through lintians output later to see if its a fatal tag we detected, or not.
1309 # So we only run lintian once on all tags, even if we might reject on some, but not
1311 # Additionally build up a set of tags
1313 (fd, temp_filename) = utils.temp_filename()
1314 temptagfile = os.fdopen(fd, 'w')
1315 for tagtype in lintiantags:
1316 for tag in lintiantags[tagtype]:
1317 temptagfile.write("%s\n" % tag)
1321 # So now we should look at running lintian at the .changes file, capturing output
1323 command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
1324 (result, output) = commands.getstatusoutput(command)
1326 # We are done with lintian, remove our tempfile and any symlinks we created
1327 os.unlink(temp_filename)
1328 for symlink in symlinked:
# A non-zero lintian exit is only warned about, not treated as a reject.
1332 utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
1333 utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
1335 if len(output) == 0:
1340 self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
1342 # We have output of lintian, this package isn't clean. Lets parse it and see if we
1343 # are having a victim for a reject.
1344 # W: tzdata: binary-without-manpage usr/sbin/tzconfig
1345 for line in output.split('\n'):
1346 m = re_parse_lintian.match(line)
1351 epackage = m.group(2)
1355 # So lets check if we know the tag at all.
1356 if etag not in tags:
1360 # We know it and it is overriden. Check that override is allowed.
1361 if etag in lintiantags['warning']:
1362 # The tag is overriden, and it is allowed to be overriden.
1363 # Don't add a reject message.
1365 elif etag in lintiantags['error']:
1366 # The tag is overriden - but is not allowed to be
1367 self.rejects.append("%s: Overriden tag %s found, but this tag may not be overwritten." % (epackage, etag))
1368 log("ftpmaster does not allow tag to be overridable", etag)
1370 # Tag is known, it is not overriden, direct reject.
1371 self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
1372 # Now tell if they *might* override it.
1373 if etag in lintiantags['warning']:
1374 log("auto rejecting", "overridable", etag)
1375 self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
1377 log("auto rejecting", "not overridable", etag)
1379 ###########################################################################
1380 def check_urgency(self):
# For sourceful uploads, normalise the Urgency field: default it when
# missing, lowercase it, and fall back to Urgency::Default (with a
# warning) when the value is not in Urgency::Valid.
1382 if self.pkg.changes["architecture"].has_key("source"):
1383 if not self.pkg.changes.has_key("urgency"):
1384 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1385 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1386 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1387 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1388 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1389 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1391 ###########################################################################
1393 # Sanity check the time stamps of files inside debs.
1394 # [Files in the near future cause ugly warnings and extreme time
1395 # travel can cause errors on extraction]
1397 def check_timestamps(self):
# Reject debs whose tar members have timestamps in the future (beyond
# FutureTimeTravelGrace) or before PastCutoffYear. NOTE(review): excerpt
# is elided -- the try:/if framing around debExtract is not fully visible.
1400 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1401 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1402 tar = TarTime(future_cutoff, past_cutoff)
1404 for filename, entry in self.pkg.files.items():
1405 if entry["type"] == "deb":
1408 deb_file = utils.open_file(filename)
1409 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1412 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1413 except SystemError, e:
1414 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1415 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1418 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1422 future_files = tar.future_files.keys()
# Report a single representative future-dated file plus the total count.
1424 num_future_files = len(future_files)
1425 future_file = future_files[0]
1426 future_date = tar.future_files[future_file]
1427 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1428 % (filename, num_future_files, future_file, time.ctime(future_date)))
1430 ancient_files = tar.ancient_files.keys()
1432 num_ancient_files = len(ancient_files)
1433 ancient_file = ancient_files[0]
1434 ancient_date = tar.ancient_files[ancient_file]
1435 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1436 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
# NOTE(review): sys.exc_type/sys.exc_value are the legacy Python 1.x API;
# sys.exc_info() is the modern spelling -- left as-is here.
1438 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1440 def check_if_upload_is_sponsored(self, uid_email, uid_name):
# Decide whether the signer matches the Maintainer/Changed-By identity;
# if not, the upload is sponsored and sponsoremail may be recorded.
# NOTE(review): the return statements are elided from this excerpt.
1441 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1443 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
# Sponsored source upload: if the signer's @debian.org address is a known
# alias and is not the maintainer/changed-by, remember it as the sponsor.
1449 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1450 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1451 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1452 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1453 self.pkg.changes["sponsoremail"] = uid_email
1458 ###########################################################################
1459 # check_signed_by_key checks
1460 ###########################################################################
1462 def check_signed_by_key(self):
1463 """Ensure the .changes is signed by an authorized uploader."""
# NOTE(review): excerpt is elided -- the early returns after the reject
# appends and the session close are not visible here.
1464 session = DBConn().session()
1466 # First of all we check that the person has proper upload permissions
1467 # and that this upload isn't blocked
1468 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1471 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1474 # TODO: Check that import-keyring adds UIDs properly
1476 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1479 # Check that the fingerprint which uploaded has permission to do so
1480 self.check_upload_permissions(fpr, session)
1482 # Check that this package is not in a transition
1483 self.check_transition(session)
1488 def check_upload_permissions(self, fpr, session):
1489 # Check any one-off upload blocks
1490 self.check_upload_blocks(fpr, session)
1492 # Start with DM as a special case
1493 # DM is a special case unfortunately, so we check it first
1494 # (keys with no source access get more access than DMs in one
1495 # way; DMs can only upload for their packages whether source
1496 # or binary, whereas keys with no access might be able to
1497 # upload some binaries)
1498 if fpr.source_acl.access_level == 'dm':
1499 self.check_dm_source_upload(fpr, session)
1501 # Check source-based permissions for other types
1502 if self.pkg.changes["architecture"].has_key("source"):
1503 if fpr.source_acl.access_level is None:
1504 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1505 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1506 self.rejects.append(rej)
1509 # If not a DM, we allow full upload rights
1510 uid_email = "%s@debian.org" % (fpr.uid.uid)
1511 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1514 # Check binary upload permissions
1515 # By this point we know that DMs can't have got here unless they
1516 # are allowed to deal with the package concerned so just apply
1518 if fpr.binary_acl.access_level == 'full':
1521 # Otherwise we're in the map case
1522 tmparches = self.pkg.changes["architecture"].copy()
1523 tmparches.pop('source', None)
1525 for bam in fpr.binary_acl_map:
1526 tmparches.pop(bam.architecture.arch_string, None)
1528 if len(tmparches.keys()) > 0:
1529 if fpr.binary_reject:
1530 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1531 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1532 self.rejects.append(rej)
1534 # TODO: This is where we'll implement reject vs throw away binaries later
1535 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1536 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1537 rej += "\nFingerprint: %s", (fpr.fingerprint)
1538 self.rejects.append(rej)
1541 def check_upload_blocks(self, fpr, session):
1542 """Check whether any upload blocks apply to this source, source
1543 version, uid / fpr combination"""
# Helper: build the common reject prefix for a matched UploadBlock row
# (the return is elided from this excerpt).
1545 def block_rej_template(fb):
1546 rej = 'Manual upload block in place for package %s' % fb.source
1547 if fb.version is not None:
1548 rej += ', version %s' % fb.version
# Scan all blocks for this source; either a fingerprint or a uid match on
# an applicable version is enough to reject.
1551 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1552 # version is None if the block applies to all versions
1553 if fb.version is None or fb.version == self.pkg.changes['version']:
1554 # Check both fpr and uid - either is enough to cause a reject
1555 if fb.fpr is not None:
1556 if fb.fpr.fingerprint == fpr.fingerprint:
1557 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1558 if fb.uid is not None:
1559 if fb.uid == fpr.uid:
1560 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1563 def check_dm_upload(self, fpr, session):
# Enforce the Debian Maintainer upload conditions from GR 2007-003.
# NOTE(review): excerpt is elided; returns and some bindings (e.g. `r`
# from the query result) are not visible here.
1564 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1565 ## none of the uploaded packages are NEW
# NOTE(review): `uid` is used below but not visibly bound in this excerpt
# -- presumably defined in an elided line or a latent NameError; confirm
# against the full file.
1567 for f in self.pkg.files.keys():
1568 if self.pkg.files[f].has_key("byhand"):
1569 self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
1571 if self.pkg.files[f].has_key("new"):
1572 self.rejects.append("%s may not upload NEW file %s" % (uid, f))
1578 ## the most recent version of the package uploaded to unstable or
1579 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1580 ## section of its control file
1581 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1582 q = q.join(SrcAssociation)
1583 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1584 q = q.order_by(desc('source.version')).limit(1)
1589 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1590 self.rejects.append(rej)
1594 if not r.dm_upload_allowed:
1595 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1596 self.rejects.append(rej)
1599 ## the Maintainer: field of the uploaded .changes file corresponds with
1600 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1602 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1603 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1605 ## the most recent version of the package uploaded to unstable or
1606 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1607 ## non-developer maintainers cannot NMU or hijack packages)
1609 # srcuploaders includes the maintainer
1611 for sup in r.srcuploaders:
1612 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1613 # Eww - I hope we never have two people with the same name in Debian
1614 if email == fpr.uid.uid or name == fpr.uid.name:
1619 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1622 ## none of the packages are being taken over from other source packages
1623 for b in self.pkg.changes["binary"].keys():
1624 for suite in self.pkg.changes["distribution"].keys():
1625 q = session.query(DBSource)
1626 q = q.join(DBBinary).filter_by(package=b)
1627 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1630 if s.source != self.pkg.changes["source"]:
1631 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1635 def check_transition(self, session):
# Reject sourceful unstable uploads of packages that are part of an
# ongoing release transition (as declared in the transitions YAML file).
# NOTE(review): excerpt is elided -- returns, `cnf = Config()` and the
# binding of `expected` (presumably t["new"] -- confirm) are not visible.
1638 sourcepkg = self.pkg.changes["source"]
1640 # No sourceful upload -> no need to do anything else, direct return
1641 # We also work with unstable uploads, not experimental or those going to some
1642 # proposed-updates queue
1643 if "source" not in self.pkg.changes["architecture"] or \
1644 "unstable" not in self.pkg.changes["distribution"]:
1647 # Also only check if there is a file defined (and existant) with
1649 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1650 if transpath == "" or not os.path.exists(transpath):
1653 # Parse the yaml file
1654 sourcefile = file(transpath, 'r')
1655 sourcecontent = sourcefile.read()
# NOTE(review): yaml.load on the ftpmaster-maintained transitions file; a
# broken file disables transition checking with a warning rather than
# rejecting uploads.
1657 transitions = yaml.load(sourcecontent)
1658 except yaml.YAMLError, msg:
1659 # This shouldn't happen, there is a wrapper to edit the file which
1660 # checks it, but we prefer to be safe than ending up rejecting
1662 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1665 # Now look through all defined transitions
1666 for trans in transitions:
1667 t = transitions[trans]
1668 source = t["source"]
1671 # Will be None if nothing is in testing.
1672 current = get_source_in_suite(source, "testing", session)
1673 if current is not None:
1674 compare = apt_pkg.VersionCompare(current.version, expected)
1676 if current is None or compare < 0:
1677 # This is still valid, the current version in testing is older than
1678 # the new version we wait for, or there is none in testing yet
1680 # Check if the source we look at is affected by this.
1681 if sourcepkg in t['packages']:
1682 # The source is affected, lets reject it.
1684 rejectmsg = "%s: part of the %s transition.\n\n" % (
1687 if current is not None:
1688 currentlymsg = "at version %s" % (current.version)
1690 currentlymsg = "not present in testing"
1692 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1694 rejectmsg += "\n".join(textwrap.wrap("""Your package
1695 is part of a testing transition designed to get %s migrated (it is
1696 currently %s, we need version %s). This transition is managed by the
1697 Release Team, and %s is the Release-Team member responsible for it.
1698 Please mail debian-release@lists.debian.org or contact %s directly if you
1699 need further assistance. You might want to upload to experimental until this
1700 transition is done."""
1701 % (source, currentlymsg, expected,t["rm"], t["rm"])))
1703 self.rejects.append(rejectmsg)
1706 ###########################################################################
1707 # End check_signed_by_key checks
1708 ###########################################################################
1710 def build_summaries(self):
1711 """ Build a summary of changes the upload introduces. """
# NOTE(review): excerpt is elided; the conditions guarding the summary
# concatenations below are not visible here.
1713 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1715 short_summary = summary
1717 # This is for direport's benefit...
1718 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1721 summary += "Changes: " + f
1723 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
# announce() with action=0 only returns text; it takes no real action.
1725 summary += self.announce(short_summary, 0)
1727 return (summary, short_summary)
1729 ###########################################################################
1731 def close_bugs(self, summary, action):
1733 Send mail to close bugs as instructed by the closes field in the changes file.
1734 Also add a line to summary if any work was done.
1736 @type summary: string
1737 @param summary: summary text, as given by L{build_summaries}
1740 @param action: Set to false no real action will be done.
1743 @return: summary. If action was taken, extended by the list of closed bugs.
# NOTE(review): excerpt is elided -- the docstring delimiters, the
# per-bug loop header and the action guard around the mail sending are
# not visible here.
1747 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1749 bugs = self.pkg.changes["closes"].keys()
1755 summary += "Closing bugs: "
1757 summary += "%s " % (bug)
# One bug-close mail is generated per bug, with a stable-distribution
# warning substituted in when the upload targets stable.
1760 self.Subst["__BUG_NUMBER__"] = bug
1761 if self.pkg.changes["distribution"].has_key("stable"):
1762 self.Subst["__STABLE_WARNING__"] = """
1763 Note that this package is not part of the released stable Debian
1764 distribution. It may have dependencies on other unreleased software,
1765 or other instabilities. Please take care if you wish to install it.
1766 The update will eventually make its way into the next released Debian
1769 self.Subst["__STABLE_WARNING__"] = ""
1770 mail_message = utils.TemplateSubst(self.Subst, template)
1771 utils.send_mail(mail_message)
1773 # Clear up after ourselves
1774 del self.Subst["__BUG_NUMBER__"]
1775 del self.Subst["__STABLE_WARNING__"]
1777 if action and self.logger:
1778 self.logger.log(["closing bugs"] + bugs)
1784 ###########################################################################
1786 def announce(self, short_summary, action):
1788 Send an announce mail about a new upload.
1790 @type short_summary: string
1791 @param short_summary: Short summary text to include in the mail
1794 @param action: Set to false no real action will be done.
1797 @return: Textstring about action taken.
# NOTE(review): excerpt is elided -- docstring delimiters, the
# `lists_done`/`summary` initialisers, returns and the action guard
# around the mail sending are not visible here.
1802 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1804 # Only do announcements for source uploads with a recent dpkg-dev installed
1805 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1806 self.pkg.changes["architecture"].has_key("source"):
1812 self.Subst["__SHORT_SUMMARY__"] = short_summary
# Announce once per configured Suite::<dist>::Announce list, de-duplicated
# via lists_done.
1814 for dist in self.pkg.changes["distribution"].keys():
1815 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1816 if announce_list == "" or lists_done.has_key(announce_list):
1819 lists_done[announce_list] = 1
1820 summary += "Announcing to %s\n" % (announce_list)
1824 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
# Sourceful uploads are additionally Bcc'd to the PTS tracking server.
1825 if cnf.get("Dinstall::TrackingServer") and \
1826 self.pkg.changes["architecture"].has_key("source"):
1827 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1828 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1830 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1831 utils.send_mail(mail_message)
1833 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1835 if cnf.FindB("Dinstall::CloseBugs"):
1836 summary = self.close_bugs(summary, action)
1838 del self.Subst["__SHORT_SUMMARY__"]
1842 ###########################################################################
1844 def accept (self, summary, short_summary, targetdir=None):
1848 This moves all files referenced from the .changes into the I{accepted}
1849 queue, sends the accepted mail, announces to lists, closes bugs and
1850 also checks for override disparities. If enabled it will write out
1851 the version history for the BTS Version Tracking and will finally call
1854 @type summary: string
1855 @param summary: Summary text
1857 @type short_summary: string
1858 @param short_summary: Short summary
# NOTE(review): excerpt is elided -- docstring delimiters, `cnf = Config()`
# and several guards are not visible in this excerpt.
1863 stats = SummaryStats()
1865 accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
1867 if targetdir is None:
1868 targetdir = cnf["Dir::Queue::Accepted"]
1872 self.logger.log(["Accepting changes", self.pkg.changes_file])
# Persist the parsed upload state (.dak file) alongside the moved files.
1874 self.pkg.write_dot_dak(targetdir)
1876 # Move all the files into the accepted directory
1877 utils.move(self.pkg.changes_file, targetdir)
1879 for name, entry in sorted(self.pkg.files.items()):
1880 utils.move(name, targetdir)
1881 stats.accept_bytes += float(entry["size"])
1883 stats.accept_count += 1
1885 # Send accept mail, announce to lists, close bugs and check for
1886 # override disparities
1887 if not cnf["Dinstall::Options::No-Mail"]:
1889 self.Subst["__SUITE__"] = ""
1890 self.Subst["__SUMMARY__"] = summary
1891 mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
1892 utils.send_mail(mail_message)
1893 self.announce(short_summary, 1)
1895 ## Helper stuff for DebBugs Version Tracking
1896 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1897 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1898 # the conditionalization on dsc["bts changelog"] should be
1901 # Write out the version history from the changelog
1902 if self.pkg.changes["architecture"].has_key("source") and \
1903 self.pkg.dsc.has_key("bts changelog"):
# Write atomically: dot-prefixed temp file in the target dir, then rename.
1905 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1906 version_history = os.fdopen(fd, 'w')
1907 version_history.write(self.pkg.dsc["bts changelog"])
1908 version_history.close()
1909 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1910 self.pkg.changes_file[:-8]+".versions")
1911 os.rename(temp_filename, filename)
1912 os.chmod(filename, 0644)
1914 # Write out the binary -> source mapping.
1915 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1916 debinfo = os.fdopen(fd, 'w')
1917 for name, entry in sorted(self.pkg.files.items()):
1918 if entry["type"] == "deb":
1919 line = " ".join([entry["package"], entry["version"],
1920 entry["architecture"], entry["source package"],
1921 entry["source version"]])
1922 debinfo.write(line+"\n")
1924 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1925 self.pkg.changes_file[:-8]+".debinfo")
1926 os.rename(temp_filename, filename)
1927 os.chmod(filename, 0644)
1929 # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
1930 # <Ganneff> we do call queue_build too
1931 # <mhy> well yes, we'd have had to if we were inserting into accepted
1932 # <Ganneff> now. thats database only.
1933 # <mhy> urgh, that's going to get messy
1934 # <Ganneff> so i make the p-n call to it *also* using accepted/
1935 # <mhy> but then the packages will be in the queue_build table without the files being there
1936 # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
1937 # <mhy> ah, good point
1938 # <Ganneff> so it will work out, as unchecked move it over
1939 # <mhy> that's all completely sick
1942 # This routine returns None on success or an error on failure
1943 res = get_or_set_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
    def check_override(self):
        """
        Checks override entries for validity. Mails "Override disparity"
        warnings, if that feature is enabled.

        Abandons the check if
          - override disparity checks are disabled
          - mail sending is disabled
        """
        # NOTE(review): the usual 'cnf = Config()' local set-up appears elided
        # from this view -- 'cnf' is referenced below. Confirm against VCS.

        # Abandon the check if:
        # a) override disparity checks have been disabled
        # b) we're not sending mail
        if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           cnf["Dinstall::Options::No-Mail"]:
            # NOTE(review): the early 'return' for this guard appears elided.

        summary = self.pkg.check_override()
        # NOTE(review): an early return for an empty summary (no disparity
        # found) appears elided here -- confirm against the full file.

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        # Mail the disparity summary, then drop the substitution key again so
        # later template expansions don't pick up a stale value.
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]
1980 ###########################################################################
    def remove(self, dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked

        Changes into the upload's own directory and deletes the .changes
        file (per-file deletion inside the loop is elided from this view).

        @type dir: string or None
        @param dir: not used in the visible lines -- the method chdirs to
            self.pkg.directory regardless. TODO confirm against full file.
        """
        os.chdir(self.pkg.directory)
        for f in self.pkg.files.keys():
            # NOTE(review): the per-file 'os.unlink(f)' appears elided here.
        os.unlink(self.pkg.changes_file)
1995 ###########################################################################
1997 def move_to_dir (self, dest, perms=0660, changesperms=0664):
1999 Move files to dest with certain perms/changesperms
2001 utils.move(self.pkg.changes_file, dest, perms=changesperms)
2002 for f in self.pkg.files.keys():
2003 utils.move(f, dest, perms=perms)
2005 ###########################################################################
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @param reject_files: iterable of file names (paths relative to the
            current directory) to move into the reject directory
        """
        # NOTE(review): 'cnf = Config()' set-up appears elided from this view.

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                # NOTE(review): 'continue' appears elided here.

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            # O_EXCL makes the create fail with EEXIST if someone already
            # claimed this name in the reject directory.
            # NOTE(review): the enclosing 'try:' appears elided.
            dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            # NOTE(review): the 'except OSError, e:' header appears elided --
            # 'e' below comes from that handler.
            # File exists?  Let's try and move it to the morgue
            if e.errno == errno.EEXIST:
                morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                # NOTE(review): a 'try:' appears elided; find_next_free raises
                # NoFreeFilenameError when it runs out of candidate names.
                morgue_file = utils.find_next_free(morgue_file)
                except NoFreeFilenameError:
                    # Something's either gone badly Pete Tong, or
                    # someone is trying to exploit us.
                    utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                utils.move(dest_file, morgue_file, perms=0660)
                # Retry the exclusive create now that the old file is gone.
                # NOTE(review): the retry 'try:'/'except OSError:' appears elided.
                dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            # NOTE(review): 'os.close(dest_fd)' appears elided here.
2055 ###########################################################################
    def do_reject (self, manual=0, reject_message="", note=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type note: string
        @param note: presumably seed text written into the editor buffer;
            the loop that writes it is partly elided -- TODO confirm
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            # NOTE(review): the loop header feeding 'line' (presumably over
            # 'note') and the temp_file.close() appear elided.
            temp_file.write(line)
            editor = os.environ.get("EDITOR","vi")
            # NOTE(review): the initial "answer = 'E'" appears elided.
            while answer == 'E':
                # Let the user edit the message, then read it back in.
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                # NOTE(review): temp_fh.close() appears elided.
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                # NOTE(review): the per-iteration 'answer = ""' reset appears elided.
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    # NOTE(review): default-answer substitution on empty input
                    # appears elided.
                answer = answer[:1].upper()
            os.unlink(temp_filename)
            # NOTE(review): handling for 'A'bandon / 'Q'uit answers appears elided.

        print "Rejecting.\n"

        # NOTE(review): 'cnf = Config()' set-up appears elided.

        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        # NOTE(review): the 'if not manual: ... else: ...' split appears
        # elided -- the two Subst set-ups below are the automatic and the
        # manual rejection cases respectively.
        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
        self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
        os.write(reason_fd, reject_message)
        reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)

        # Build up the rejection email
        user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
        self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
        self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        # Write the rejection email out as the <foo>.reason file
        os.write(reason_fd, reject_mail_message)

        # Clean the substitution keys up again so later mails aren't polluted.
        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        # NOTE(review): 'os.close(reason_fd)' appears elided here.

        # Send the rejection mail if appropriate
        if not cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        # NOTE(review): a 'if self.logger:' guard may be elided -- confirm.
        self.logger.log(["rejected", self.pkg.changes_file])
2153 ################################################################################
    def in_override_p(self, package, component, suite, binary_type, file, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: database id of the component

        @type suite: string
        @param suite: suite name (documented upstream as "database id of the
            suite", but it is used below as a name in Cnf lookups -- confirm)

        @type binary_type: string
        @param binary_type: type of the package; "" means source

        @type file: string
        @param file: filename we check

        @return: the database result. But noone cares anyway.
        """
        # NOTE(review): 'cnf = Config()' set-up appears elided from this view.

        if binary_type == "": # must be source
            # NOTE(review): "file_type = 'dsc'" and the 'else:' header appear
            # elided here; the next line is the else-branch assignment.
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = cnf["Suite::%s::OverrideSuite" % (suite)]

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        # NOTE(review): a "if len(result) > 0: result = result[0]" guard
        # appears elided before these attribute accesses.
        self.pkg.files[file]["override section"] = result.section.section
        self.pkg.files[file]["override priority"] = result.priority.priority

        # NOTE(review): 'return result' appears elided.
2203 ################################################################################
    def get_anyversion(self, sv_list, suite):
        """
        Find the highest version present for C{suite} or any suite that
        enhances it (Suite::<suite>::VersionChecks::Enhances).

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type suite: string
        @param suite: suite name

        @return: presumably the highest matching version, or None when no
            entry matches -- the init/return lines are elided, TODO confirm
        """
        # NOTE(review): this method uses the global 'Cnf' while sibling
        # methods use a local 'cnf = Config()' -- confirm this is intended.
        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (s, v) in sv_list:
            # Suite names are compared case-insensitively.
            # NOTE(review): the lowered 'anysuite' list is rebuilt on every
            # iteration; hoisting it out of the loop would be a cheap win.
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    # NOTE(review): 'anyversion = v' appears elided here,
                    # as does the initial 'anyversion = None' and the final
                    # 'return anyversion'.
2224 ################################################################################
    def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type file: string
        @param file: filename being checked (used in reject/warning messages)

        @type new_version: string
        @param new_version: version of the upload being checked

        @type sourceful: bool
        @param sourceful: whether the upload carries source; "must be newer"
            violations are only rejected for sourceful uploads
        """
        # NOTE(review): 'cnf = Config()' set-up appears elided from this view.

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                # vercmp < 1 means new_version <= existent_version.
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))

                # vercmp > -1 means new_version >= existent_version.
                if suite in must_be_older_than and vercmp > -1:
                    # NOTE(review): a 'cansave = 0' initialisation appears elided.
                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        # NOTE(review): the 'if not add_version:' branch header
                        # appears elided before this block.
                        # not add_version can only happen if we map to a suite
                        # that doesn't enhance the suite we're propup'ing from.
                        # so "propup-ver x a b c; map a d" is a problem only if
                        # d doesn't enhance a.
                        #
                        # i think we could always propagate in this case, rather
                        # than complaining. either way, this isn't a REJECT issue
                        #
                        # And - we really should complain to the dorks who configured dak
                        self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                        # NOTE(review): 'cansave = 1' appears elided here.

                        elif not target_version:
                            # not targets_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propogate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propogation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                            # NOTE(review): 'cansave = 1' appears elided here.
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # Propagate the upload to the mapped suite instead.
                            self.warnings.append("Propogating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            # NOTE(review): 'cansave = 1' appears elided here.

                    # NOTE(review): an 'if not cansave:' guard appears elided
                    # before this reject.
                    # NOTE(review): every sibling method appends to
                    # 'self.rejects'; 'self.reject' here looks like a typo
                    # that would raise AttributeError at runtime -- confirm
                    # and fix in a dedicated code change.
                    self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
2303 ################################################################################
    def check_binary_against_db(self, file, session):
        """
        Cross-suite version check and archive-overwrite check for one binary
        (.deb) file of this upload; problems are appended to self.rejects.

        @type file: string
        @param file: key into self.pkg.files for the binary being checked

        @type session: SQLA Session
        @param session: SQL Alchemy session object
        """
        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
        # 'all' binaries compete with arch-specific ones for version ordering.
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))

        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       file, self.pkg.files[file]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
        q = q.filter_by(version=self.pkg.files[file]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])

        # NOTE(review): an 'if q.count() > 0:' guard appears elided before
        # this reject -- as written it would fire unconditionally.
        self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
2321 ################################################################################
    def check_source_against_db(self, file, session):
        """
        Cross-suite version check for the source package named in the .dsc;
        problems are appended to self.rejects.

        @type file: string
        @param file: filename used in reject messages

        @type session: SQLA Session
        @param session: SQL Alchemy session object
        """
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        q = session.query(SrcAssociation)
        q = q.join(DBSource).filter(DBSource.source==source)

        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                       file, version, sourceful=True)
2336 ################################################################################
    def check_dsc_against_db(self, file, session):
        """
        Locate every file the .dsc references (incoming, pool, or queue
        directories) and verify its md5sum and size against the .dsc.

        @warning: NB: this function can remove entries from the 'files' index [if
        the orig tarball is a duplicate of the one in the archive]; if
        you're iterating over 'files' and call this function as part of
        the loop, be sure to add a check to the top of the loop to
        ensure you haven't just tried to dereference the deleted entry.

        NOTE(review): several control-flow lines (loop headers, try/except,
        'match'/'found' bookkeeping, else-branches) are elided from this
        view; inline notes mark the obvious gaps.
        """
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files

        # Try and find all files mentioned in the .dsc. This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            # Case 1: the file arrived as part of this upload.
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # NOTE(review): the 'for i in ql:' header appears elided.
                if not i.filename.endswith(dsc_name):
                    # NOTE(review): 'ql.remove(i)' appears elided.

                # "[dak] has not broken them. [dak] has fixed a
                # brokenness. Your crappy hack exploited a bug in
                # the old dak.
                #
                # "(Come on! I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                # -- ajk@ on d-devel@l.d.o

                # Ignore exact matches for .orig.tar.gz
                # NOTE(review): a 'match = 0' initialisation appears elided.
                if re_is_orig_source.match(dsc_name):
                    # NOTE(review): a 'for i in ql:' header appears elided.
                    if self.pkg.files.has_key(dsc_name) and \
                       int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                       self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                        self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                        # TODO: Don't delete the entry, just mark it as not needed
                        # This would fix the stupidity of changing something we often iterate over
                        # whilst we're doing it
                        del self.pkg.files[dsc_name]
                        if not orig_files.has_key(dsc_name):
                            orig_files[dsc_name] = {}
                        orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                        # NOTE(review): 'match = 1' appears elided here.

                # NOTE(review): a guard like 'if ql and not match:' appears
                # elided before this reject.
                self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

            # Case 2: an orig source file not in incoming -- search the pool.
            elif re_is_orig_source.match(dsc_name):
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                # NOTE(review): the 'for i in ql:' header appears elided.
                if not i.filename.endswith(dsc_name):
                    # NOTE(review): 'ql.remove(i)' appears elided.

                # NOTE(review): an 'if len(ql) > 0:' branch header appears
                # elided before the candidate-selection code below.
                # Unfortunately, we may get more than one match here if,
                # for example, the package was in potato but had an -sa
                # upload in woody. So we need to choose the right one.

                # default to something sane in case we don't match any or have only one
                # NOTE(review): 'x = ql[0]' and the 'if len(ql) > 1: for i in ql:'
                # headers appear elided.
                old_file = os.path.join(i.location.path, i.filename)
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh)
                # NOTE(review): 'old_file_fh.close()' appears elided.
                actual_size = os.stat(old_file)[stat.ST_SIZE]
                if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                    # NOTE(review): 'x = i' (pick this candidate) appears elided.

                # Recompute the checksums for the chosen candidate 'x'.
                old_file = os.path.join(i.location.path, i.filename)
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh)
                # NOTE(review): 'old_file_fh.close()' and the 'found = ...'
                # assignment appear elided.
                actual_size = os.stat(old_file)[stat.ST_SIZE]
                suite_type = x.location.archive_type
                # need this for updating dsc_files in install()
                dsc_entry["files id"] = x.file_id
                # See install() in process-accepted...
                if not orig_files.has_key(dsc_name):
                    orig_files[dsc_name] = {}
                orig_files[dsc_name]["id"] = x.file_id
                orig_files[dsc_name]["path"] = old_file
                orig_files[dsc_name]["location"] = x.location.location_id

                # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                # Not there? Check the queue directories...
                # NOTE(review): this fallback presumably runs when the pool
                # lookup found nothing (an 'else:' header appears elided).
                for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                    if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                        # NOTE(review): 'continue' appears elided here.
                    in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                    if os.path.exists(in_otherdir):
                        in_otherdir_fh = utils.open_file(in_otherdir)
                        actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                        in_otherdir_fh.close()
                        actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                        # NOTE(review): the 'found = ...' assignment appears elided.
                        if not orig_files.has_key(dsc_name):
                            orig_files[dsc_name] = {}
                        orig_files[dsc_name]["path"] = in_otherdir

                # NOTE(review): an 'if not found:' guard appears elided.
                self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))

            # Case 3: a non-orig file missing from incoming -- hard reject.
            # NOTE(review): the 'else:' header and a 'continue' appear elided
            # around this reject.
            self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))

            # Finally verify whatever copy we settled on against the .dsc.
            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, file))
2473 ################################################################################
2474 # This is used by process-new and process-holding to recheck a changes file
2475 # at the time we're running. It mainly wraps various other internal functions
2476 # and is similar to accepted_checks - these should probably be tidied up
    def recheck(self, session):
        """
        Re-run the database-dependent checks (source existence, version and
        overwrite checks) for every file of this upload; problems are
        appended to self.rejects.

        @type session: SQLA Session
        @param session: SQL Alchemy session object
        """
        # NOTE(review): 'cnf = Config()' set-up appears elided from this view.
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(f):
                # NOTE(review): 'continue' appears elided here.

            entry = self.pkg.files[f]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    # Not in the DB -- maybe the .dsc is still sitting in a
                    # queue directory.
                    # NOTE(review): a 'found = False' initialisation appears elided.
                    for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                                # NOTE(review): 'found = True' appears elided.

                    # NOTE(review): an 'if not found:' guard appears elided
                    # before this reject.
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

            # Version and file overwrite checks
            if entry["type"] == "deb":
                self.check_binary_against_db(f, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(f, session)
                self.check_dsc_against_db(f, session)
2511 ################################################################################
    def accepted_checks(self, overwrite_checks, session):
        """
        Recheck anything that relies on the database; since that's not
        frozen between accept and our run time when called from p-a.

        @type overwrite_checks: bool
        @param overwrite_checks: set to False when installing to
            stable/oldstable; skips the version/file overwrite checks

        @type session: SQLA Session
        @param session: SQL Alchemy session object
        """
        # NOTE(review): 'cnf = Config()' and the 'propogate = {}' /
        # 'nopropogate = {}' initialisations appear elided from this view.

        # Find the .dsc (again)
        # NOTE(review): a 'dsc_filename = None' initialisation appears elided.
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                # NOTE(review): 'dsc_filename = f' appears elided here.

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                # NOTE(review): 'continue' appears elided here.

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                # NOTE(review): unlike recheck(), no 'session' argument is
                # passed to source_exists() here -- confirm this is intended.
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # propagate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propogate[suite] = 1
                # NOTE(review): the 'else:' header appears elided here.
                    nopropogate[suite] = 1

        # Add the propagation targets that no file vetoed.
        for suite in propogate.keys():
            if suite in nopropogate:
                # NOTE(review): 'continue' appears elided here.
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            # Check the package is still in the override tables
            # NOTE(review): an 'entry = self.pkg.files[checkfile]' re-bind
            # appears elided; without it 'entry' holds the previous loop's
            # last value.
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2569 ################################################################################
2570 # This is not really a reject, but an unaccept, but since a) the code for
2571 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2572 # extremely rare, for now we'll go with whining at our admin folks...
    def do_unaccept(self):
        """
        "Unaccept" an upload: write the rejection mail out as the package's
        <foo>.reason file in the reject queue and mail our admin folks,
        since a real unaccept (reopen bugs, unannounce, ...) is non-trivial.
        """
        # NOTE(review): 'cnf = Config()' set-up appears elided from this view.

        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
        if cnf.has_key("Dinstall::Bcc"):
            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")

        reject_mail_message = utils.TemplateSubst(self.Subst, template)

        # Write the rejection email out as the <foo>.reason file
        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename)

        # O_EXCL: fail loudly rather than racing another writer.
        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        os.write(fd, reject_mail_message)
        # NOTE(review): 'os.close(fd)' appears elided here.

        # NOTE(review): a No-Mail guard like the one in do_reject() may be
        # elided before this send -- confirm.
        utils.send_mail(reject_mail_message)

        # Clean up the substitution keys again; note __BCC__ is deliberately
        # (or accidentally?) left set -- confirm against the full file.
        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__REJECT_MESSAGE__"]
        del self.Subst["__CC__"]
2608 ################################################################################
2609 # If any file of an upload has a recent mtime then chances are good
2610 # the file is still being uploaded.
2612 def upload_too_new(self):
2615 # Move back to the original directory to get accurate time stamps
2617 os.chdir(self.pkg.directory)
2618 file_list = self.pkg.files.keys()
2619 file_list.extend(self.pkg.dsc_files.keys())
2620 file_list.append(self.pkg.changes_file)
2623 last_modified = time.time()-os.path.getmtime(f)
2624 if last_modified < int(cnf["Dinstall::SkipTime"]):