5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from dak_exceptions import *
44 from config import Config
45 from holding import Holding
47 from summarystats import SummaryStats
48 from utils import parse_changes
49 from textutils import fix_maintainer
51 ###############################################################################
def get_type(f, session=None):
    """
    Get the file type of C{f}.

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object; a new one is created if None

    @rtype: string
    @return: filetype
    """
    if session is None:
        session = DBConn().session()

    # Prefer the explicit database type recorded on the entry; otherwise
    # any recognised source-file extension maps to "dsc".
    # (Bug fix: this used to read the builtin `file`, not the parameter `f`.)
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))

    return file_type
82 ################################################################################
84 # Determine what parts in a .changes are NEW
def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @param warn: Warn if overrides are added for (old)stable

    @return: dictionary of NEW components.
    """
    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        if file_type == "dsc":

        # Seed the NEW entry with this file's metadata the first time the
        # package is seen.
        if not new.has_key(pkg):
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        if old_type != file_type:
            # source gets trumped by deb or udeb
            if old_type == "dsc":
                new[pkg]["priority"] = priority
                new[pkg]["section"] = section
                new[pkg]["type"] = file_type
                new[pkg]["component"] = component
        # Every file belonging to the package is remembered so its "new"
        # marker can be cleared below if an override already exists.
        new[pkg]["files"].append(name)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Un-mark anything that already has an override in one of the
    # target suites.
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]

        # Warn about overrides being added for (old)stable.
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
162 ################################################################################
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
               Updated in place: "section id" / "priority id" are set to the
               database ids, or -1 when invalid.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # (Bug fix: compare the priority *name*; `priority` is the database
        # object returned by get_priority() and never equals "source".)
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
206 ###############################################################################
def lookup_uid_from_fingerprint(fpr, session):
    """
    Return the uid details belonging to the key fingerprint C{fpr}.

    @type fpr: string
    @param fpr: key fingerprint to look up

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: tuple
    @return: (uid, uid_name, is_dm); uid is None and uid_name is "" when the
             fingerprint is unknown.
    """
    uid = None
    uid_name = ""
    # This is a stupid default, but see the comments below
    is_dm = False

    # Bug fix: this function previously referenced the undefined name
    # changes["fingerprint"]; it must use its own fpr parameter.
    user = get_uid_from_fingerprint(fpr, session)

    if user is not None:
        uid = user.uid
        if user.name is None:
            uid_name = ''
        else:
            uid_name = user.name

        # Check the relevant fingerprint (which we have to have)
        for f in user.fingerprint:
            if f.fingerprint == fpr:
                is_dm = f.keyring.debian_maintainer
                break

    return (uid, uid_name, is_dm)
231 ###############################################################################
233 # Used by Upload.check_timestamps
class TarTime(object):
    """Callback helper that collects tar members with suspicious mtimes.

    Members whose mtime is later than C{future_cutoff} are recorded in
    C{future_files}; members older than C{past_cutoff} are recorded in
    C{ancient_files}.  Both dictionaries map member name to mtime.
    """
    def __init__(self, future_cutoff, past_cutoff):
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        # Compare against both cutoffs; an entry can in principle land in
        # both dicts if the cutoffs overlap.
        too_new = MTime > self.future_cutoff
        too_old = MTime < self.past_cutoff
        if too_new:
            self.future_files[Name] = MTime
        if too_old:
            self.ancient_files[Name] = MTime
250 ###############################################################################
252 class Upload(object):
254 Everything that has to do with an upload processed.
261 ###########################################################################
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        # NOTE(review): `cnf` is presumably bound in the elided method
        # preamble (a Config instance) -- confirm against the full source.
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
    def package_info(self):
        """
        Assemble a human-readable summary of the reject reasons, warnings
        and notes accumulated for this upload.
        """
        # NOTE(review): `msg` is presumably initialised (to "") in lines
        # elided from this view -- confirm against the full source.
        if len(self.rejects) > 0:
            msg += "Reject Reasons:\n"
            msg += "\n".join(self.rejects)

        if len(self.warnings) > 0:
            msg += "\n".join(self.warnings)

        if len(self.notes) > 0:
            msg += "\n".join(self.notes)
297 ###########################################################################
298 def update_subst(self):
299 """ Set up the per-package template substitution mappings """
303 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
304 if not self.pkg.changes.has_key("architecture") or not \
305 isinstance(changes["architecture"], DictType):
306 self.pkg.changes["architecture"] = { "Unknown" : "" }
308 # and maintainer2047 may not exist.
309 if not self.pkg.changes.has_key("maintainer2047"):
310 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
312 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
313 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
314 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
316 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
317 if self.pkg.changes["architecture"].has_key("source") and \
318 self.pkg.changes["changedby822"] != "" and \
319 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
321 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
322 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], changes["maintainer2047"])
323 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
325 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
326 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
327 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
329 if "sponsoremail" in self.pkg.changes:
330 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
332 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
333 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
335 # Apply any global override of the Maintainer field
336 if cnf.get("Dinstall::OverrideMaintainer"):
337 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
338 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
340 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
341 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
342 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
344 ###########################################################################
345 def load_changes(self, filename):
348 @rvalue: whether the changes file was valid or not. We may want to
349 reject even if this is True (see what gets put in self.rejects).
350 This is simply to prevent us even trying things later which will
351 fail because we couldn't properly parse the file.
353 self.pkg.changes_file = filename
355 # Parse the .changes field into a dictionary
357 self.pkg.changes.update(parse_changes(filename))
358 except CantOpenError:
359 self.rejects.append("%s: can't read file." % (filename))
361 except ParseChangesError, line:
362 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
364 except ChangesUnicodeError:
365 self.rejects.append("%s: changes file not proper utf-8" % (filename))
368 # Parse the Files field from the .changes into another dictionary
370 self.pkg.files.update(build_file_list(self.pkg.changes))
371 except ParseChangesError, line:
372 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
374 except UnknownFormatError, format:
375 self.rejects.append("%s: unknown format '%s'." % (filename, format))
378 # Check for mandatory fields
379 for i in ("distribution", "source", "binary", "architecture",
380 "version", "maintainer", "files", "changes", "description"):
381 if not self.pkg.changes.has_key(i):
382 # Avoid undefined errors later
383 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
386 # Strip a source version in brackets from the source field
387 if re_strip_srcver.search(self.pkg.changes["source"]):
388 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
390 # Ensure the source field is a valid package name.
391 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
392 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
394 # Split multi-value fields into a lower-level dictionary
395 for i in ("architecture", "distribution", "binary", "closes"):
396 o = self.pkg.changes.get(i, "")
398 del self.pkg.changes[i]
400 self.pkg.changes[i] = {}
403 self.pkg.changes[i][j] = 1
405 # Fix the Maintainer: field to be RFC822/2047 compatible
407 (self.pkg.changes["maintainer822"],
408 self.pkg.changes["maintainer2047"],
409 self.pkg.changes["maintainername"],
410 self.pkg.changes["maintaineremail"]) = \
411 fix_maintainer (self.pkg.changes["maintainer"])
412 except ParseMaintError, msg:
413 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
414 % (filename, changes["maintainer"], msg))
416 # ...likewise for the Changed-By: field if it exists.
418 (self.pkg.changes["changedby822"],
419 self.pkg.changes["changedby2047"],
420 self.pkg.changes["changedbyname"],
421 self.pkg.changes["changedbyemail"]) = \
422 fix_maintainer (self.pkg.changes.get("changed-by", ""))
423 except ParseMaintError, msg:
424 self.pkg.changes["changedby822"] = ""
425 self.pkg.changes["changedby2047"] = ""
426 self.pkg.changes["changedbyname"] = ""
427 self.pkg.changes["changedbyemail"] = ""
429 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
430 % (filename, changes["changed-by"], msg))
432 # Ensure all the values in Closes: are numbers
433 if self.pkg.changes.has_key("closes"):
434 for i in self.pkg.changes["closes"].keys():
435 if re_isanum.match (i) == None:
436 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
438 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
439 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
440 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
442 # Check there isn't already a changes file of the same name in one
443 # of the queue directories.
444 base_filename = os.path.basename(filename)
445 for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
446 if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
447 self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
449 # Check the .changes is non-empty
450 if not self.pkg.files:
451 self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
454 # Changes was syntactically valid even if we'll reject
457 ###########################################################################
459 def check_distributions(self):
460 "Check and map the Distribution field"
464 # Handle suite mappings
465 for m in Cnf.ValueList("SuiteMappings"):
468 if mtype == "map" or mtype == "silent-map":
469 (source, dest) = args[1:3]
470 if self.pkg.changes["distribution"].has_key(source):
471 del self.pkg.changes["distribution"][source]
472 self.pkg.changes["distribution"][dest] = 1
473 if mtype != "silent-map":
474 self.notes.append("Mapping %s to %s." % (source, dest))
475 if self.pkg.changes.has_key("distribution-version"):
476 if self.pkg.changes["distribution-version"].has_key(source):
477 self.pkg.changes["distribution-version"][source]=dest
478 elif mtype == "map-unreleased":
479 (source, dest) = args[1:3]
480 if self.pkg.changes["distribution"].has_key(source):
481 for arch in self.pkg.changes["architecture"].keys():
482 if arch not in [ arch_string for a in get_suite_architectures(source) ]:
483 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
484 del self.pkg.changes["distribution"][source]
485 self.pkg.changes["distribution"][dest] = 1
487 elif mtype == "ignore":
489 if self.pkg.changes["distribution"].has_key(suite):
490 del self.pkg.changes["distribution"][suite]
491 self.warnings.append("Ignoring %s as a target suite." % (suite))
492 elif mtype == "reject":
494 if self.pkg.changes["distribution"].has_key(suite):
495 self.rejects.append("Uploads to %s are not accepted." % (suite))
496 elif mtype == "propup-version":
497 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
499 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
500 if self.pkg.changes["distribution"].has_key(args[1]):
501 self.pkg.changes.setdefault("distribution-version", {})
502 for suite in args[2:]:
503 self.pkg.changes["distribution-version"][suite] = suite
505 # Ensure there is (still) a target distribution
506 if len(self.pkg.changes["distribution"].keys()) < 1:
507 self.rejects.append("No valid distribution remaining.")
509 # Ensure target distributions exist
510 for suite in self.pkg.changes["distribution"].keys():
511 if not Cnf.has_key("Suite::%s" % (suite)):
512 self.rejects.append("Unknown distribution `%s'." % (suite))
514 ###########################################################################
516 def binary_file_checks(self, f, session):
518 entry = self.pkg.files[f]
520 # Extract package control information
521 deb_file = utils.open_file(f)
523 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
525 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
527 # Can't continue, none of the checks on control would work.
530 # Check for mandantory "Description:"
533 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
535 self.rejects.append("%s: Missing Description in binary package" % (f))
540 # Check for mandatory fields
541 for field in [ "Package", "Architecture", "Version" ]:
542 if control.Find(field) == None:
544 self.rejects.append("%s: No %s field in control." % (f, field))
547 # Ensure the package name matches the one give in the .changes
548 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
549 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
551 # Validate the package field
552 package = control.Find("Package")
553 if not re_valid_pkg_name.match(package):
554 self.rejects.append("%s: invalid package name '%s'." % (f, package))
556 # Validate the version field
557 version = control.Find("Version")
558 if not re_valid_version.match(version):
559 self.rejects.append("%s: invalid version number '%s'." % (f, version))
561 # Ensure the architecture of the .deb is one we know about.
562 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
563 architecture = control.Find("Architecture")
564 upload_suite = self.pkg.changes["distribution"].keys()[0]
566 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
567 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
568 self.rejects.append("Unknown architecture '%s'." % (architecture))
570 # Ensure the architecture of the .deb is one of the ones
571 # listed in the .changes.
572 if not self.pkg.changes["architecture"].has_key(architecture):
573 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
575 # Sanity-check the Depends field
576 depends = control.Find("Depends")
578 self.rejects.append("%s: Depends field is empty." % (f))
580 # Sanity-check the Provides field
581 provides = control.Find("Provides")
583 provide = re_spacestrip.sub('', provides)
585 self.rejects.append("%s: Provides field is empty." % (f))
586 prov_list = provide.split(",")
587 for prov in prov_list:
588 if not re_valid_pkg_name.match(prov):
589 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
591 # Check the section & priority match those given in the .changes (non-fatal)
592 if control.Find("Section") and entry["section"] != "" \
593 and entry["section"] != control.Find("Section"):
594 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
595 (f, control.Find("Section", ""), entry["section"]))
596 if control.Find("Priority") and entry["priority"] != "" \
597 and entry["priority"] != control.Find("Priority"):
598 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
599 (f, control.Find("Priority", ""), entry["priority"]))
601 entry["package"] = package
602 entry["architecture"] = architecture
603 entry["version"] = version
604 entry["maintainer"] = control.Find("Maintainer", "")
606 if f.endswith(".udeb"):
607 files[f]["dbtype"] = "udeb"
608 elif f.endswith(".deb"):
609 files[f]["dbtype"] = "deb"
611 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
613 entry["source"] = control.Find("Source", entry["package"])
615 # Get the source version
616 source = entry["source"]
619 if source.find("(") != -1:
620 m = re_extract_src_version.match(source)
622 source_version = m.group(2)
624 if not source_version:
625 source_version = files[f]["version"]
627 entry["source package"] = source
628 entry["source version"] = source_version
630 # Ensure the filename matches the contents of the .deb
631 m = re_isadeb.match(f)
634 file_package = m.group(1)
635 if entry["package"] != file_package:
636 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
637 (f, file_package, entry["dbtype"], entry["package"]))
638 epochless_version = re_no_epoch.sub('', control.Find("Version"))
641 file_version = m.group(2)
642 if epochless_version != file_version:
643 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
644 (f, file_version, entry["dbtype"], epochless_version))
647 file_architecture = m.group(3)
648 if entry["architecture"] != file_architecture:
649 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
650 (f, file_architecture, entry["dbtype"], entry["architecture"]))
652 # Check for existent source
653 source_version = entry["source version"]
654 source_package = entry["source package"]
655 if self.pkg.changes["architecture"].has_key("source"):
656 if source_version != self.pkg.changes["version"]:
657 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
658 (source_version, f, self.pkg.changes["version"]))
660 # Check in the SQL database
661 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
662 # Check in one of the other directories
663 source_epochless_version = re_no_epoch.sub('', source_version)
664 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
665 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
667 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
670 dsc_file_exists = False
671 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
672 if cnf.has_key("Dir::Queue::%s" % (myq)):
673 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
674 dsc_file_exists = True
677 if not dsc_file_exists:
678 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
680 # Check the version and for file overwrites
681 self.check_binary_against_db(f, session)
683 b = Binary(f).scan_package()
684 if len(b.rejects) > 0:
686 self.rejects.append(j)
    def source_file_checks(self, f, session):
        """
        Sanity-check the source file C{f} (.dsc / .diff / tarball) against
        the .changes metadata, appending to self.rejects as appropriate.
        """
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        # Record the package/version/type parsed out of the filename.
        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source filed in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        # (orig tarballs carry no Debian revision, hence chopversion2).
        if entry["type"] == "orig.tar.gz":
            changes_version = self.pkg.changes["chopversion2"]
            changes_version = self.pkg.changes["chopversion"]
        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
                self.rejects.append(j)

        entry["architecture"] = "source"
726 def per_suite_file_checks(self, f, suite, session):
728 entry = self.pkg.files[f]
731 if entry.has_key("byhand"):
734 # Handle component mappings
735 for m in cnf.ValueList("ComponentMappings"):
736 (source, dest) = m.split()
737 if entry["component"] == source:
738 entry["original component"] = source
739 entry["component"] = dest
741 # Ensure the component is valid for the target suite
742 if cnf.has_key("Suite:%s::Components" % (suite)) and \
743 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
744 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
747 # Validate the component
748 component = entry["component"]
749 if not get_component(component, session):
750 self.rejects.append("file '%s' has unknown component '%s'." % (f, component))
753 # See if the package is NEW
754 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
757 # Validate the priority
758 if entry["priority"].find('/') != -1:
759 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
761 # Determine the location
762 location = cnf["Dir::Pool"]
763 l = get_location(location, component, archive, session)
765 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
766 entry["location id"] = -1
768 entry["location id"] = l.location_id
770 # Check the md5sum & size against existing files (if any)
771 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
773 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
774 entry["size"], entry["md5sum"], entry["location id"])
777 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
778 elif found is False and poolfile is not None:
779 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
782 entry["files id"] = None
784 entry["files id"] = poolfile.file_id
786 # Check for packages that have moved from one component to another
787 entry['suite'] = suite
788 res = get_binary_components(files[f]['package'], suite, entry["architecture"], session)
790 entry["othercomponents"] = res.fetchone()[0]
792 def check_files(self, action=True):
793 archive = utils.where_am_i()
794 file_keys = self.pkg.files.keys()
798 # XXX: As far as I can tell, this can no longer happen - see
799 # comments by AJ in old revisions - mhy
800 # if reprocess is 2 we've already done this and we're checking
801 # things again for the new .orig.tar.gz.
802 # [Yes, I'm fully aware of how disgusting this is]
803 if action and self.reprocess < 2:
805 os.chdir(self.pkg.directory)
807 ret = holding.copy_to_holding(f)
809 # XXX: Should we bail out here or try and continue?
810 self.rejects.append(ret)
814 # Check there isn't already a .changes or .dak file of the same name in
815 # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
816 # [NB: this check must be done post-suite mapping]
817 base_filename = os.path.basename(self.pkg.changes_file)
818 dot_dak_filename = base_filename[:-8] + ".dak"
820 for suite in self.pkg.changes["distribution"].keys():
821 copychanges = "Suite::%s::CopyChanges" % (suite)
822 if cnf.has_key(copychanges) and \
823 os.path.exists(os.path.join(cnf[copychanges], base_filename)):
824 self.rejects.append("%s: a file with this name already exists in %s" \
825 % (base_filename, cnf[copychanges]))
827 copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
828 if cnf.has_key(copy_dot_dak) and \
829 os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
830 self.rejects.append("%s: a file with this name already exists in %s" \
831 % (dot_dak_filename, Cnf[copy_dot_dak]))
837 s = DBConn().session()
839 for f, entry in self.pkg.files.items():
840 # Ensure the file does not already exist in one of the accepted directories
841 for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
842 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
843 if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
844 self.rejects.append("%s file already exists in the %s directory." % (f, d))
846 if not re_taint_free.match(f):
847 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
849 # Check the file is readable
850 if os.access(f, os.R_OK) == 0:
851 # When running in -n, copy_to_holding() won't have
852 # generated the reject_message, so we need to.
854 if os.path.exists(f):
855 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
857 self.rejects.append("Can't read `%s'. [file not found]" % (f))
858 entry["type"] = "unreadable"
861 # If it's byhand skip remaining checks
862 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
864 entry["type"] = "byhand"
866 # Checks for a binary package...
867 elif re_isadeb.match(f):
869 entry["type"] = "deb"
871 # This routine appends to self.rejects/warnings as appropriate
872 self.binary_file_checks(f, session)
874 # Checks for a source package...
875 elif re_issource.match(f):
878 # This routine appends to self.rejects/warnings as appropriate
879 self.source_file_checks(f, session)
881 # Not a binary or source package? Assume byhand...
884 entry["type"] = "byhand"
886 # Per-suite file checks
887 entry["oldfiles"] = {}
888 for suite in self.pkg.changes["distribution"].keys():
889 self.per_suite_file_checks(f, suite, session)
891 # If the .changes file says it has source, it must have source.
892 if self.pkg.changes["architecture"].has_key("source"):
894 self.rejects.append("no source found and Architecture line in changes mention source.")
896 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
897 self.rejects.append("source only uploads are not supported.")
899 ###########################################################################
    def check_dsc(self, action=True):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):

        # Find the .dsc among the upload's files.
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
            self.rejects.append("source uploads must contain a dsc file")

        # Parse the .dsc file
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        # NOTE(review): bare name `dsc` -- elsewhere this method uses
        # self.pkg.dsc; looks like it should be self.pkg.dsc here. Confirm.
            self.pkg.dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        # NOTE(review): bare name `dsc["version"]` -- presumably should be
        # self.pkg.dsc["version"] as on the following line. Confirm.
        if not re_valid_version.match(dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Bumping the version number of the .dsc breaks extraction by stable's
        # dpkg-source. So let's not do that...
        if self.pkg.dsc["format"] != "1.0":
            self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))

        # Validate the Maintainer field
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
                # Check for broken dpkg-dev lossage...
                if field.startswith("ARRAY"):
                    self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
                                        (dsc_filename, field_name.title()))

                # Have apt try to parse them...
                    apt_pkg.ParseSrcDepends(field)
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure there is a .tar.gz in the .dsc file
        # NOTE(review): bare name `dsc_files` -- presumably should be
        # self.pkg.dsc_files (populated above). Confirm.
        for f in dsc_files.keys():
            m = re_issource.match(f)
                self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
            if ftype == "orig.tar.gz" or ftype == "tar.gz":
            self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
1009 # Ensure source is newer than existing source in target suites
1010 self.check_source_against_db(dsc_filename, session)
1012 self.check_dsc_against_db(dsc_filename)
1016 ###########################################################################
# Extract the uploaded source package with dpkg-source and harvest the
# debian/changelog version list for BTS version tracking.
# NOTE(review): the gaps in the embedded numbering show lines elided from
# this listing; comments below describe only what is visible.
1018 def get_changelog_versions(self, source_dir):
1019 """Extracts a the source package and (optionally) grabs the
1020 version history out of debian/changelog for the BTS."""
# Locate the .dsc entry among the upload's files.
# NOTE(review): uses `self.files` / bare `files` here while the rest of the
# class uses `self.pkg.files` -- looks like a refactor leftover; verify.
1024 # Find the .dsc (again)
1026 for f in self.files.keys():
1027 if files[f]["type"] == "dsc":
1030 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1031 if not dsc_filename:
1034 # Create a symlink mirror of the source files in our temporary directory
1035 for f in self.files.keys():
1036 m = re_issource.match(f)
1038 src = os.path.join(source_dir, f)
1039 # If a file is missing for whatever reason, give up.
1040 if not os.path.exists(src):
# Skip the orig tarball here when one is already known; it is symlinked
# separately just below.
1043 if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
1045 dest = os.path.join(os.getcwd(), f)
1046 os.symlink(src, dest)
1048 # If the orig.tar.gz is not a part of the upload, create a symlink to the
1050 if self.pkg.orig_tar_gz:
1051 dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
1052 os.symlink(self.pkg.orig_tar_gz, dest)
1054 # Extract the source
1055 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1056 (result, output) = commands.getstatusoutput(cmd)
1058 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
# BUG(review): list.append() takes exactly one argument; the trailing ""
# here would raise TypeError.  It probably belongs inside
# prefix_multi_line_string() as a third argument.
1059 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
# BTS version tracking is optional; nothing more to do if unconfigured.
1062 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1065 # Get the upstream version
# NOTE(review): bare `dsc` -- elsewhere this method reads self.pkg.dsc;
# confirm which mapping is meant.
1066 upstr_version = re_no_epoch.sub('', dsc["version"])
1067 if re_strip_revision.search(upstr_version):
1068 upstr_version = re_strip_revision.sub('', upstr_version)
1070 # Ensure the changelog file exists
1071 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1072 if not os.path.exists(changelog_filename):
1073 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1076 # Parse the changelog
1077 self.pkg.dsc["bts changelog"] = ""
1078 changelog_file = utils.open_file(changelog_filename)
# Keep only the lines that look like changelog version headers.
1079 for line in changelog_file.readlines():
1080 m = re_changelog_versions.match(line)
1082 self.pkg.dsc["bts changelog"] += line
1083 changelog_file.close()
1085 # Check we found at least one revision in the changelog
1086 if not self.pkg.dsc["bts changelog"]:
1087 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
# Drive the source-extraction check: skip when there is no source to check
# (or the orig.tar.gz is missing), otherwise extract into a temporary
# directory, collect changelog versions, and clean the tree up afterwards.
1089 def check_source(self):
1090 # XXX: I'm fairly sure reprocess == 2 can never happen
1091 # AJT disabled the is_incoming check years ago - mhy
1092 # We should probably scrap or rethink the whole reprocess thing
1094 # a) there's no source
1095 # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
1096 # or c) the orig.tar.gz is MIA
# orig_tar_gz == -1 is the sentinel for "orig tarball missing" (case c).
1097 if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
1098 or self.pkg.orig_tar_gz == -1:
1101 tmpdir = utils.temp_dirname()
1103 # Move into the temporary directory
1107 # Get the changelog version history
1108 self.get_changelog_versions(cwd)
1110 # Move back and cleanup the temporary tree
1114 shutil.rmtree(tmpdir)
# Anything other than a permissions problem removing the tree is fatal.
1116 if e.errno != errno.EACCES:
1117 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1119 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1120 # We probably have u-r or u-w directories so chmod everything
1122 cmd = "chmod -R u+rwx %s" % (tmpdir)
1123 result = os.system(cmd)
1125 utils.fubar("'%s' failed with result %s." % (cmd, result))
# Second removal attempt after forcing the permissions.
1126 shutil.rmtree(tmpdir)
1128 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1130 ###########################################################################
# Validate and (where needed) compute checksums for the .changes and .dsc,
# appending any problems to self.rejects.
1131 def ensure_hashes(self):
1132 # Make sure we recognise the format of the Files: field in the .changes
# Parse "Format: X.Y" into an (int, int) tuple for version comparisons.
1133 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1134 if len(format) == 2:
1135 format = int(format[0]), int(format[1])
1137 format = int(float(format[0])), 0
1139 # We need to deal with the original changes blob, as the fields we need
1140 # might not be in the changes dict serialised into the .dak anymore.
1141 orig_changes = parse_deb822(self.pkg.changes['filecontents'])
1143 # Copy the checksums over to the current changes dict. This will keep
1144 # the existing modifications to it intact.
1145 for field in orig_changes:
1146 if field.startswith('checksums-'):
1147 self.pkg.changes[field] = orig_changes[field]
1149 # Check for unsupported hashes
1150 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1151 self.rejects.append(j)
1153 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1154 self.rejects.append(j)
1156 # We have to calculate the hash if we have an earlier changes version than
1157 # the hash appears in rather than require it exist in the changes file
1158 for hashname, hashfunc, version in utils.known_hashes:
1159 # TODO: Move _ensure_changes_hash into this class
1160 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1161 self.rejects.append(j)
# The .dsc only needs hashing for sourceful uploads.
1162 if "source" in self.pkg.changes["architecture"]:
1163 # TODO: Move _ensure_dsc_hash into this class
1164 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1165 self.rejects.append(j)
1168 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1169 self.rejects.append(m)
1171 for m in utils.check_size(".changes", self.pkg.files):
1172 self.rejects.append(m)
1174 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1175 self.rejects.append(m)
1177 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1178 self.rejects.append(m)
# BUG(review): `dsc`, `files` and `dsc_files` are bare names here; the rest
# of this method uses self.pkg.dsc / self.pkg.files / self.pkg.dsc_files.
# As written this would raise NameError.
1180 for m in utils.ensure_hashes(self.pkg.changes, dsc, files, dsc_files):
1181 self.rejects.append(m)
1183 ###########################################################################
# Normalise the Urgency field of sourceful uploads: default it when absent,
# lower-case it, and fall back to the configured default (with a warning)
# when the value is not in Urgency::Valid.
# NOTE(review): `cnf` is presumably bound on the elided line 1185 -- confirm.
1184 def check_urgency(self):
1186 if self.pkg.changes["architecture"].has_key("source"):
1187 if not self.pkg.changes.has_key("urgency"):
1188 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1189 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1190 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1191 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1192 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1193 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1195 ###########################################################################
1197 # Sanity check the time stamps of files inside debs.
1198 # [Files in the near future cause ugly warnings and extreme time
1199 # travel can cause errors on extraction]
# Reject debs whose members carry timestamps beyond the configured
# future/past cutoffs, by extracting control/data tarballs through TarTime.
1201 def check_timestamps(self):
1202 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1203 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1204 tar = TarTime(future_cutoff, past_cutoff)
# BUG(review): .keys() yields filenames only; unpacking `filename, entry`
# needs .items() -- as written this raises ValueError/TypeError.
1206 for filename, entry in self.pkg.files.keys():
1207 if entry["type"] == "deb":
1210 deb_file = utils.open_file(filename)
1211 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1214 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1215 except SystemError, e:
1216 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1217 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1220 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
# Report at most one example of each violation class per deb.
1224 future_files = tar.future_files.keys()
1226 num_future_files = len(future_files)
1227 future_file = future_files[0]
1228 future_date = tar.future_files[future_file]
1229 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1230 % (filename, num_future_files, future_file, time.ctime(future_date)))
1232 ancient_files = tar.ancient_files.keys()
1234 num_ancient_files = len(ancient_files)
1235 ancient_file = ancient_files[0]
1236 ancient_date = tar.ancient_files[ancient_file]
1237 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1238 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
# Catch-all for extraction failures (Python 2 sys.exc_type/exc_value).
1240 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1242 ###########################################################################
# Authorisation check for the signing key: resolves the fingerprint to a
# uid, decides NMU/sponsorship rights, enforces DM upload restrictions
# (DM-Upload-Allowed + Uploaders), and forbids hijacks/BYHAND/NEW for
# restricted uploaders.
1243 def check_signed_by_key(self):
1244 """Ensure the .changes is signed by an authorized uploader."""
1245 session = DBConn().session()
1247 (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
1249 # match claimed name with actual name:
1251 # This is fundamentally broken but need us to refactor how we get
1252 # the UIDs/Fingerprints in order for us to fix it properly
# Fall back to using the fingerprint itself as the uid.
1253 uid, uid_email = self.pkg.changes["fingerprint"], uid
1254 may_nmu, may_sponsor = 1, 1
1255 # XXX by default new dds don't have a fingerprint/uid in the db atm,
1256 # and can't get one in there if we don't allow nmu/sponsorship
1257 elif is_dm is False:
1258 # If is_dm is False, we allow full upload rights
1259 uid_email = "%s@debian.org" % (uid)
1260 may_nmu, may_sponsor = 1, 1
1262 # Assume limited upload rights unless we've discovered otherwise
1264 may_nmu, may_sponsor = 0, 0
# An upload is "sponsored" when the signer matches neither Maintainer nor
# Changed-By (email first, then name).
1266 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1268 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1270 if uid_name == "": sponsored = 1
# Record the sponsor address on sourceful uploads signed via a role alias.
1273 if ("source" in self.pkg.changes["architecture"] and
1274 uid_email and utils.is_email_alias(uid_email)):
1275 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1276 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1277 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1278 self.pkg.changes["sponsoremail"] = uid_email
1280 if sponsored and not may_sponsor:
1281 self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
# DM path: only allowed when the newest DM-Upload-Allowed version of the
# source lists the signer in Maintainer/Uploaders.
1283 if not sponsored and not may_nmu:
1284 should_reject = True
1285 highest_sid, highest_version = None, None
1287 # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
1288 # It ignores higher versions with the dm_upload_allowed flag set to false
1289 # I'm keeping the existing behaviour for now until I've gone back and
1290 # checked exactly what the GR says - mhy
1291 for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
1292 if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
1293 highest_sid = si.source_id
1294 highest_version = si.version
1296 if highest_sid is None:
1297 self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
1299 for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
1300 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1301 if email == uid_email or name == uid_name:
1302 should_reject = False
1305 if should_reject is True:
1306 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
# A DM may not take over ("hijack") binaries owned by another source.
1308 for b in self.pkg.changes["binary"].keys():
1309 for suite in self.pkg.changes["distribution"].keys():
1310 q = session.query(DBSource)
1311 q = q.join(DBBinary).filter_by(package=b)
# BUG(review): filter_by() takes keyword arguments only; `filter_by(suite)`
# raises TypeError -- presumably filter_by(suite_name=suite) was intended.
1312 q = q.join(BinAssociation).join(Suite).filter_by(suite)
1315 if s.source != self.pkg.changes["source"]:
1316 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
1318 for f in self.pkg.files.keys():
1319 if self.pkg.files[f].has_key("byhand"):
1320 self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
1321 if self.pkg.files[f].has_key("new"):
1322 self.rejects.append("%s may not upload NEW file %s" % (uid, f))
1324 ###########################################################################
# Compose the long and short textual summaries used by the accept/announce
# mails.  Returns (summary, short_summary).
1325 def build_summaries(self):
1326 """ Build a summary of changes the upload introduces. """
1328 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1330 short_summary = summary
1332 # This is for direport's benefit...
# Re-insert "." separator lines so the changes text survives mail quoting.
1333 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1336 summary += "Changes: " + f
1338 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
# announce() with action=0 only returns text; it sends no mail here.
1340 summary += self.announce(short_summary, 0)
1342 return (summary, short_summary)
1344 ###########################################################################
1346 def close_bugs(self, summary, action):
1348 Send mail to close bugs as instructed by the closes field in the changes file.
1349 Also add a line to summary if any work was done.
1351 @type summary: string
1352 @param summary: summary text, as given by L{build_summaries}
# @type action: bool (per the visible usage)
1355 @param action: Set to false no real action will be done.
1358 @return: summary. If action was taken, extended by the list of closed bugs.
1362 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1364 bugs = self.pkg.changes["closes"].keys()
1370 summary += "Closing bugs: "
1372 summary += "%s " % (bug)
# One templated mail per bug number; __STABLE_WARNING__ is filled in when
# the upload does not target the released stable distribution.
1374 self.Subst["__BUG_NUMBER__"] = bug
1375 if self.pkg.changes["distribution"].has_key("stable"):
1376 self.Subst["__STABLE_WARNING__"] = """
1377 Note that this package is not part of the released stable Debian
1378 distribution. It may have dependencies on other unreleased software,
1379 or other instabilities. Please take care if you wish to install it.
1380 The update will eventually make its way into the next released Debian
1383 self.Subst["__STABLE_WARNING__"] = ""
1384 mail_message = utils.TemplateSubst(self.Subst, template)
1385 utils.send_mail(mail_message)
1387 # Clear up after ourselves
1388 del self.Subst["__BUG_NUMBER__"]
1389 del self.Subst["__STABLE_WARNING__"]
1392 self.Logger.log(["closing bugs"] + bugs)
1398 ###########################################################################
1400 def announce(self, short_summary, action):
1402 Send an announce mail about a new upload.
1404 @type short_summary: string
1405 @param short_summary: Short summary text to include in the mail
# @type action: bool (per the visible usage)
1408 @param action: Set to false no real action will be done.
1411 @return: Textstring about action taken.
1416 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1418 # Only do announcements for source uploads with a recent dpkg-dev installed
# Format >= 1.6 implies a dpkg-dev new enough to be worth announcing.
1419 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1420 self.pkg.changes["architecture"].has_key("source"):
1426 self.Subst["__SHORT_SUMMARY__"] = short_summary
# One announcement per distinct Suite::<dist>::Announce list; lists_done
# de-duplicates lists shared by several target suites.
1428 for dist in self.pkg.changes["distribution"].keys():
1429 announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
1430 if announce_list == "" or lists_done.has_key(announce_list):
1433 lists_done[announce_list] = 1
1434 summary += "Announcing to %s\n" % (announce_list)
1437 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
# Bcc the package-tracking server on sourceful uploads if configured.
1438 if cnf.get("Dinstall::TrackingServer") and \
1439 self.pkg.changes["architecture"].has_key("source"):
1440 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1441 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1443 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1444 utils.send_mail(mail_message)
1446 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1448 if cnf.FindB("Dinstall::CloseBugs"):
1449 summary = self.close_bugs(summary, action)
1451 del self.Subst["__SHORT_SUMMARY__"]
1455 ###########################################################################
1457 def accept (self, summary, short_summary, targetdir=None):
1461 This moves all files referenced from the .changes into the I{accepted}
1462 queue, sends the accepted mail, announces to lists, closes bugs and
1463 also checks for override disparities. If enabled it will write out
1464 the version history for the BTS Version Tracking and will finally call
1467 @type summary: string
1468 @param summary: Summary text
1470 @type short_summary: string
1471 @param short_summary: Short summary
# targetdir defaults to Dir::Queue::Accepted when not given.
1476 stats = SummaryStats()
1478 accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
1480 if targetdir is None:
1481 targetdir = cnf["Dir::Queue::Accepted"]
1484 self.Logger.log(["Accepting changes", self.pkg.changes_file])
1486 self.write_dot_dak(targetdir)
1488 # Move all the files into the accepted directory
1489 utils.move(self.pkg.changes_file, targetdir)
1491 for name, entry in sorted(self.pkg.files.items()):
1492 utils.move(name, targetdir)
1493 stats.accept_bytes += float(entry["size"])
1495 stats.accept_count += 1
1497 # Send accept mail, announce to lists, close bugs and check for
1498 # override disparities
1499 if not cnf["Dinstall::Options::No-Mail"]:
1500 self.Subst["__SUITE__"] = ""
1501 self.Subst["__SUMMARY__"] = summary
1502 mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
1503 utils.send_mail(mail_message)
1504 self.announce(short_summary, 1)
1506 ## Helper stuff for DebBugs Version Tracking
1507 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1508 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1509 # the conditionalization on dsc["bts changelog"] should be
1512 # Write out the version history from the changelog
1513 if self.pkg.changes["architecture"].has_key("source") and \
1514 self.pkg.dsc.has_key("bts changelog"):
# Write to a dot-prefixed temp file first, then rename into place so the
# consumer never sees a partial file.
1516 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1517 version_history = os.fdopen(fd, 'w')
1518 version_history.write(self.pkg.dsc["bts changelog"])
1519 version_history.close()
# "[:-8]" strips the trailing ".changes" from the filename.
1520 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1521 self.pkg.changes_file[:-8]+".versions")
1522 os.rename(temp_filename, filename)
1523 os.chmod(filename, 0644)
1525 # Write out the binary -> source mapping.
1526 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1527 debinfo = os.fdopen(fd, 'w')
1528 for name, entry in sorted(self.pkg.files.items()):
1529 if entry["type"] == "deb":
1530 line = " ".join([entry["package"], entry["version"],
1531 entry["architecture"], entry["source package"],
1532 entry["source version"]])
1533 debinfo.write(line+"\n")
1535 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1536 self.pkg.changes_file[:-8]+".debinfo")
1537 os.rename(temp_filename, filename)
1538 os.chmod(filename, 0644)
1540 # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
1541 # <Ganneff> we do call queue_build too
1542 # <mhy> well yes, we'd have had to if we were inserting into accepted
1543 # <Ganneff> now. thats database only.
1544 # <mhy> urgh, that's going to get messy
1545 # <Ganneff> so i make the p-n call to it *also* using accepted/
1546 # <mhy> but then the packages will be in the queue_build table without the files being there
1547 # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
1548 # <mhy> ah, good point
1549 # <Ganneff> so it will work out, as unchecked move it over
1550 # <mhy> that's all completely sick
1553 # This routine returns None on success or an error on failure
1554 res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
1559 def check_override(self):
1561 Checks override entries for validity. Mails "Override disparity" warnings,
1562 if that feature is enabled.
1564 Abandons the check if
1565 - override disparity checks are disabled
1566 - mail sending is disabled
1571 # Abandon the check if:
1572 # a) override disparity checks have been disabled
1573 # b) we're not sending mail
1574 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1575 cnf["Dinstall::Options::No-Mail"]:
# pkg.check_override() yields the disparity text (empty means no disparity).
1578 summary = self.pkg.check_override()
1583 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1585 self.Subst["__SUMMARY__"] = summary
1586 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1587 utils.send_mail(mail_message)
1588 del self.Subst["__SUMMARY__"]
1590 ###########################################################################
# Delete the upload's files and its .changes from the package directory.
# NOTE(review): the `dir` parameter shadows the builtin and is unused in the
# visible lines -- confirm against the elided body.
1592 def remove(self, dir=None):
1594 Used (for instance) in p-u to remove the package from unchecked
1597 os.chdir(self.pkg.directory)
1601 for f in self.pkg.files.keys():
1603 os.unlink(self.pkg.changes_file)
1605 ###########################################################################
# Move the .changes and every referenced file into dest, applying the given
# octal permission modes (changesperms for the .changes, perms for the rest).
1607 def move_to_dir (self, dest, perms=0660, changesperms=0664):
1609 Move files to dest with certain perms/changesperms
1611 utils.move(self.pkg.changes_file, dest, perms=changesperms)
1612 for f in self.pkg.files.keys():
1613 utils.move(f, dest, perms=perms)
1615 ###########################################################################
1617 def force_reject(self, reject_files):
1619 Forcefully move files from the current directory to the
1620 reject directory. If any file already exists in the reject
1621 directory it will be moved to the morgue to make way for
# @param reject_files: iterable of filenames to move
1625 @param files: file dictionary
1631 for file_entry in reject_files:
1632 # Skip any files which don't exist or which we don't have permission to copy.
1633 if os.access(file_entry, os.R_OK) == 0:
1636 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
# O_EXCL makes the open fail (EEXIST) when a same-named reject already
# exists, which triggers the move-to-morgue path below.
1639 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
1641 # File exists? Let's try and move it to the morgue
1642 if e.errno == errno.EEXIST:
1643 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
1645 morgue_file = utils.find_next_free(morgue_file)
1646 except NoFreeFilenameError:
1647 # Something's either gone badly Pete Tong, or
1648 # someone is trying to exploit us.
1649 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
1651 utils.move(dest_file, morgue_file, perms=0660)
# Retry claiming the destination now that the old file is in the morgue.
1653 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1656 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
1660 # If we got here, we own the destination file, so we can
1661 # safely overwrite it.
1662 utils.move(file_entry, dest_file, 1, perms=0660)
1665 ###########################################################################
1666 def do_reject (self, manual=0, reject_message="", note=""):
1668 Reject an upload. If called without a reject message or C{manual} is
1669 true, spawn an editor so the user can write one.
# @type manual: bool/int
1672 @param manual: manual or automated rejection
1674 @type reject_message: string
1675 @param reject_message: A reject message
1680 # If we weren't given a manual rejection message, spawn an
1681 # editor so the user can add one in...
1682 if manual and not reject_message:
1683 (fd, temp_filename) = utils.temp_filename()
1684 temp_file = os.fdopen(fd, 'w')
1687 temp_file.write(line)
1689 editor = os.environ.get("EDITOR","vi")
# Edit/confirm loop: re-open the editor until the operator picks an answer
# other than (E)dit from the prompt below.
1691 while answer == 'E':
1692 os.system("%s %s" % (editor, temp_filename))
1693 temp_fh = utils.open_file(temp_filename)
1694 reject_message = "".join(temp_fh.readlines())
1696 print "Reject message:"
1697 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
1698 prompt = "[R]eject, Edit, Abandon, Quit ?"
1700 while prompt.find(answer) == -1:
1701 answer = utils.our_raw_input(prompt)
1702 m = re_default_answer.search(prompt)
1705 answer = answer[:1].upper()
1706 os.unlink(temp_filename)
1712 print "Rejecting.\n"
# "[:-8]" strips ".changes"; the .reason file lives in the reject queue.
1716 reason_filename = self.pkg.changes_file[:-8] + ".reason"
1717 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
1719 # Move all the files into the reject directory
1720 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
1721 self.force_reject(reject_files)
1723 # If we fail here someone is probably trying to exploit the race
1724 # so let's just raise an exception ...
1725 if os.path.exists(reason_filename):
1726 os.unlink(reason_filename)
1727 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1729 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
# Automated rejection branch: canned rejector address, X-DAK headers.
1732 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
1733 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
1734 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
1735 os.write(reason_fd, reject_message)
1736 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1738 # Build up the rejection email
# Manual rejection branch: mail comes from the operator.
1739 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
1740 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
1741 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
1742 self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
1743 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1744 # Write the rejection email out as the <foo>.reason file
1745 os.write(reason_fd, reject_mail_message)
1747 del self.Subst["__REJECTOR_ADDRESS__"]
1748 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
1749 del self.Subst["__CC__"]
1753 # Send the rejection mail if appropriate
1754 if not cnf["Dinstall::Options::No-Mail"]:
1755 utils.send_mail(reject_mail_message)
# BUG(review): bare `pkg` -- should be self.pkg.changes_file, as used
# throughout the rest of this class; as written this raises NameError.
1757 self.Logger.log(["rejected", pkg.changes_file])
1761 ################################################################################
1762 def in_override_p(self, package, component, suite, binary_type, file, session=None):
1764 Check if a package already has override entries in the DB
1766 @type package: string
1767 @param package: package name
1769 @type component: string
1770 @param component: database id of the component
1773 @param suite: database id of the suite
1775 @type binary_type: string
1776 @param binary_type: type of the package
1779 @param file: filename we check
1781 @return: the database result. But noone cares anyway.
1788 session = DBConn().session()
# Empty binary_type marks a source package (dsc); presumably file_type is
# set to "dsc" on the elided line 1791/1792 -- confirm.
1790 if binary_type == "": # must be source
1793 file_type = binary_type
1795 # Override suite name; used for example with proposed-updates
1796 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
1797 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
1799 result = get_override(package, suite, component, file_type, session)
1801 # If checking for a source package fall back on the binary override type
1802 if file_type == "dsc" and len(result) < 1:
1803 result = get_override(package, suite, component, ['deb', 'udeb'], session)
1805 # Remember the section and priority so we can check them later if appropriate
1808 self.pkg.files[file]["override section"] = result.section.section
1809 self.pkg.files[file]["override priority"] = result.priority.priority
1814 ################################################################################
# Find the highest version present in sv_list for `suite` or any suite that
# enhances it (per Suite::<suite>::VersionChecks::Enhances).
1815 def get_anyversion(self, sv_list, suite):
1818 @param sv_list: list of (suite, version) tuples to check
1821 @param suite: suite name
1826 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
1827 for (s, v) in sv_list:
# Suite names are compared case-insensitively on the configured side.
1828 if s in [ x.lower() for x in anysuite ]:
1829 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
1834 ################################################################################
1836 def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
1839 @param sv_list: list of (suite, version) tuples to check
1844 @type new_version: string
1845 @param new_version: XXX
1847 Ensure versions are newer than existing packages in target
1848 suites and that cross-suite version checking rules as
1849 set out in the conf file are satisfied.
1854 # Check versions for each target suite
1855 for target_suite in self.pkg.changes["distribution"].keys():
1856 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
1857 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
1859 # Enforce "must be newer than target suite" even if conffile omits it
1860 if target_suite not in must_be_newer_than:
1861 must_be_newer_than.append(target_suite)
1863 for (suite, existent_version) in sv_list:
1864 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
# vercmp < 1 means new_version <= existent_version; only enforced on
# sourceful uploads for the must-be-newer rule.
1866 if suite in must_be_newer_than and sourceful and vercmp < 1:
1867 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
# must-be-older violation: possibly resolvable via distribution-version
# mapping ("propup") instead of a straight reject.
1869 if suite in must_be_older_than and vercmp > -1:
1872 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
1873 # we really use the other suite, ignoring the conflicting one ...
1874 addsuite = self.pkg.changes["distribution-version"][suite]
1876 add_version = self.get_anyversion(sv_list, addsuite)
1877 target_version = self.get_anyversion(sv_list, target_suite)
1880 # not add_version can only happen if we map to a suite
1881 # that doesn't enhance the suite we're propup'ing from.
1882 # so "propup-ver x a b c; map a d" is a problem only if
1883 # d doesn't enhance a.
1885 # i think we could always propagate in this case, rather
1886 # than complaining. either way, this isn't a REJECT issue
1888 # And - we really should complain to the dorks who configured dak
1889 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
1890 self.pkg.changes.setdefault("propdistribution", {})
1891 self.pkg.changes["propdistribution"][addsuite] = 1
1893 elif not target_version:
1894 # not targets_version is true when the package is NEW
1895 # we could just stick with the "...old version..." REJECT
1896 # for this, I think.
1897 self.rejects.append("Won't propogate NEW packages.")
1898 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
1899 # propogation would be redundant. no need to reject though.
1900 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1902 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
1903 apt_pkg.VersionCompare(add_version, target_version) >= 0:
1905 self.warnings.append("Propogating upload to %s" % (addsuite))
1906 self.pkg.changes.setdefault("propdistribution", {})
1907 self.pkg.changes["propdistribution"][addsuite] = 1
# BUG(review): `self.reject` -- the attribute used everywhere else in this
# class is `self.rejects`; as written this raises AttributeError.
1911 self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1913 ################################################################################
def check_binary_against_db(self, file, session=None):
    """
    Check a binary package from this upload against the database.

    Runs the cross-suite version checks for the package and rejects
    (via self.rejects) if an identical package/version/architecture
    already exists in the archive.

    @type file: string
    @param file: filename; key into self.pkg.files for the binary to check

    @type session: SQLAlchemy session
    @param session: optional session to reuse; a new one is opened if None
    """
    if session is None:
        session = DBConn().session()

    entry = self.pkg.files[file]

    # Ensure version is sane: collect (suite, version) for every suite
    # this binary already exists in, for its own arch and 'all'.
    q = session.query(BinAssociation)
    q = q.join(DBBinary).filter(DBBinary.package == entry["package"])
    q = q.join(Architecture).filter(Architecture.arch_string.in_([entry["architecture"], 'all']))

    self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                   file, entry["version"], sourceful=False)

    # Check for any existing copies of the file.
    # filter_by takes keyword arguments; the package name must be
    # passed as package=... (a bare positional is a TypeError).
    q = session.query(DBBinary).filter_by(package=entry["package"])
    q = q.filter_by(version=entry["version"])
    q = q.join(Architecture).filter_by(arch_string=entry["architecture"])

    # Only reject when a copy actually exists in the archive.
    if q.count() > 0:
        self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
1934 ################################################################################
def check_source_against_db(self, file, session=None):
    """
    Run the cross-suite version checks for the source package named in
    the .dsc of this upload.

    @type file: string
    @param file: filename the rejection messages should reference

    @type session: SQLAlchemy session
    @param session: optional session to reuse; a new one is opened if None
    """
    # Honour a caller-supplied session rather than always opening a
    # fresh one (the parameter was previously ignored).
    if session is None:
        session = DBConn().session()

    source = self.pkg.dsc.get("source")
    version = self.pkg.dsc.get("version")

    # Ensure version is sane: gather (suite, version) pairs for every
    # suite this source already exists in.
    q = session.query(SrcAssociation)
    q = q.join(DBSource).filter(DBSource.source == source)

    self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                   file, version, sourceful=True)
1952 ################################################################################
def check_dsc_against_db(self, file, session=None):
    """
    Validate every file listed in the .dsc against incoming, the pool
    and the queue directories, locating the .orig.tar.gz where needed.

    Appends problems to self.rejects / self.warnings and may set
    self.pkg.orig_tar_gz, orig_tar_id and orig_tar_location.

    @warning: NB: this function can remove entries from the 'files' index [if
       the .orig.tar.gz is a duplicate of the one in the archive]; if
       you're iterating over 'files' and call this function as part of
       the loop, be sure to add a check to the top of the loop to
       ensure you haven't just tried to dereference the deleted entry.
    """
    # NOTE(review): a caller-supplied session is not reused here -- confirm
    # whether an "if session is None:" guard is intended, as in the
    # sibling check_*_against_db methods.
    session = DBConn().session()

    # Cleared here; filled in below if an orig tarball is found (or -1
    # if one is referenced but cannot be located anywhere).
    self.pkg.orig_tar_gz = None

    # Try and find all files mentioned in the .dsc. This has
    # to work harder to cope with the multiple possible
    # locations of an .orig.tar.gz.
    # The ordering on the select is needed to pick the newest orig
    # when it exists in multiple places.
    for dsc_name, dsc_entry in self.pkg.dsc_files.items():
        # Case 1: the file is part of this upload (present in incoming).
        if self.pkg.files.has_key(dsc_name):
            actual_md5 = self.pkg.files[dsc_name]["md5sum"]
            actual_size = int(self.pkg.files[dsc_name]["size"])
            found = "%s in incoming" % (dsc_name)

            # Check the file does not already exist in the archive
            ql = get_poolfile_like_name(dsc_name)

            # Strip out anything that isn't '%s' or '/%s$'
            if not i.filename.endswith(dsc_name):

                # "[dak] has not broken them. [dak] has fixed a
                # brokenness. Your crappy hack exploited a bug in
                # "(Come on! I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

            # Ignore exact matches for .orig.tar.gz
            if dsc_name.endswith(".orig.tar.gz"):
                # A byte-identical orig already in the archive is
                # harmless: warn, and remember its pool path instead.
                if self.pkg.files.has_key(dsc_name) and \
                   int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                   self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                    self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                    # TODO: Don't delete the entry, just mark it as not needed
                    # This would fix the stupidity of changing something we often iterate over
                    # whilst we're doing it
                    self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)

            # Same name/version but different content: hard reject.
            self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

        # Case 2: an .orig.tar.gz not in incoming -- look in the pool.
        elif dsc_name.endswith(".orig.tar.gz"):
            ql = get_poolfile_like_name(dsc_name, session)

            # Strip out anything that isn't '%s' or '/%s$'
            # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
            if not i.filename.endswith(dsc_name):

            # Unfortunately, we may get more than one match here if,
            # for example, the package was in potato but had an -sa
            # upload in woody. So we need to choose the right one.

            # default to something sane in case we don't match any or have only one

                # Checksum each candidate to find the one the .dsc
                # actually refers to.
                old_file = os.path.join(i.location.path, i.filename)
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh)
                actual_size = os.stat(old_file)[stat.ST_SIZE]
                if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):

            # Recompute checksum/size for the chosen pool file.
            # NOTE(review): this block reads 'i' (the last loop variable)
            # but records ids from 'f' -- confirm both should reference
            # the candidate selected above.
            old_file = os.path.join(i.location.path, i.filename)
            old_file_fh = utils.open_file(old_file)
            actual_md5 = apt_pkg.md5sum(old_file_fh)
            actual_size = os.stat(old_file)[stat.ST_SIZE]
            suite_type = f.location.archive_type
            # need this for updating dsc_files in install()
            dsc_entry["files id"] = f.file_id
            # See install() in process-accepted...
            self.pkg.orig_tar_id = f.file_id
            self.pkg.orig_tar_gz = old_file
            self.pkg.orig_tar_location = f.location.location_id

            # TODO: Record the queues and info in the DB so we don't hardcode all this crap
            # Not there? Check the queue directories...
            for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                if os.path.exists(in_otherdir):
                    in_otherdir_fh = utils.open_file(in_otherdir)
                    actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                    in_otherdir_fh.close()
                    actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                    self.pkg.orig_tar_gz = in_otherdir

            # Not in the pool or any queue: reject, and mark the orig
            # as missing with the -1 sentinel.
            self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
            self.pkg.orig_tar_gz = -1

        # Case 3: any other file missing from incoming: reject.
            self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))

        # Whatever we found, its checksum and size must match the .dsc.
        if actual_md5 != dsc_entry["md5sum"]:
            self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
        if actual_size != int(dsc_entry["size"]):
            self.rejects.append("size for %s doesn't match %s." % (found, file))
2083 ################################################################################
def accepted_checks(self, overwrite_checks=True, session=None):
    """
    Recheck anything that relies on the database; since that's not
    frozen between accept and our run time when called from p-a.

    @type overwrite_checks: boolean
    @param overwrite_checks: run version/file overwrite checks; set to
        False when installing to stable/oldstable

    @type session: SQLAlchemy session
    @param session: optional session to reuse; a new one is opened if None
    """
    if session is None:
        session = DBConn().session()

    propogate = {}
    nopropogate = {}

    for checkfile in self.pkg.files.keys():
        # The .orig.tar.gz can disappear out from under us is it's a
        # duplicate of one in the archive.
        if not self.pkg.files.has_key(checkfile):
            continue

        entry = self.pkg.files[checkfile]

        # Check that the source still exists
        if entry["type"] == "deb":
            source_version = entry["source version"]
            source_package = entry["source package"]
            if not self.pkg.changes["architecture"].has_key("source") \
               and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
                self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

        # Version and file overwrite checks
        if overwrite_checks:
            if entry["type"] == "deb":
                self.check_binary_against_db(checkfile, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(checkfile, session)
                # was called with the undefined name 'dsc_filename';
                # the file being checked is checkfile
                self.check_dsc_against_db(checkfile, session)

        # propogate in the case it is in the override tables:
        for suite in self.pkg.changes.get("propdistribution", {}).keys():
            if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype", ""), checkfile):
                propogate[suite] = 1
            else:
                nopropogate[suite] = 1

    # Only propagate to suites with no conflicting file
    for suite in propogate.keys():
        if suite in nopropogate:
            continue
        self.pkg.changes["distribution"][suite] = 1

    for checkfile in self.pkg.files.keys():
        # Use this file's own entry, not whatever 'entry' was left
        # bound to by the loop above.
        entry = self.pkg.files[checkfile]
        # Check the package is still in the override tables
        for suite in self.pkg.changes["distribution"].keys():
            if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype", ""), checkfile):
                self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2138 ################################################################################
2139 # This is not really a reject, but an unaccept, but since a) the code for
2140 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2141 # extremely rare, for now we'll go with whining at our admin folks...
2143 def do_unaccept(self):
2146 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2147 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2148 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2149 self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
2150 if cnf.has_key("Dinstall::Bcc"):
2151 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2153 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2155 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2157 # Write the rejection email out as the <foo>.reason file
2158 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2159 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2161 # If we fail here someone is probably trying to exploit the race
2162 # so let's just raise an exception ...
2163 if os.path.exists(reject_filename):
2164 os.unlink(reject_filename)
2166 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2167 os.write(fd, reject_mail_message)
2170 utils.send_mail(reject_mail_message)
2172 del self.Subst["__REJECTOR_ADDRESS__"]
2173 del self.Subst["__REJECT_MESSAGE__"]
2174 del self.Subst["__CC__"]
2176 ################################################################################
2177 # If any file of an upload has a recent mtime then chances are good
2178 # the file is still being uploaded.
2180 def upload_too_new(self):
2183 # Move back to the original directory to get accurate time stamps
2185 os.chdir(self.pkg.directory)
2186 file_list = self.pkg.files.keys()
2187 file_list.extend(self.pkg.dsc_files.keys())
2188 file_list.append(self.pkg.changes_file)
2191 last_modified = time.time()-os.path.getmtime(f)
2192 if last_modified < int(cnf["Dinstall::SkipTime"]):