5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
43 from dak_exceptions import *
46 from config import Config
47 from holding import Holding
49 from summarystats import SummaryStats
50 from utils import parse_changes
51 from textutils import fix_maintainer
52 from binary import Binary
54 ###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}.

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Determine the type.
    # BUGFIX: the original read file["dbtype"], indexing the Python 2
    # builtin 'file' instead of the parameter 'f' (a TypeError at runtime).
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = f["type"]
    else:
        # BUGFIX: the original interpolated 'file_type' here, which is
        # unbound on this branch (UnboundLocalError); report the raw type.
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))

    return file_type
85 ################################################################################
# Determine what parts in a .changes are NEW
def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @param warn: Warn if overrides are added for (old)stable

    @return: dictionary of NEW components.

    NOTE(review): this dump omits several lines of this function (at least
    the initialisation of 'new', the binding of 'pkg', and some
    'continue'/guard lines); confirm the full body against VCS.
    """
    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            # NOTE(review): guard body missing from this dump
            # (presumably a 'continue').
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            # NOTE(review): dsc handling missing from this dump.

        # First time we see this package: record its override candidates.
        # NOTE(review): the creation of new[pkg] itself is missing here.
        if not new.has_key(pkg):
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        if old_type != file_type:
            # source gets trumped by deb or udeb
            if old_type == "dsc":
                new[pkg]["priority"] = priority
                new[pkg]["section"] = section
                new[pkg]["type"] = file_type
                new[pkg]["component"] = component
        new[pkg]["files"].append(name)
        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Clear the NEW flag for anything that already carries an override
    # in one of the target suites.
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            # NOTE(review): the 'if ql:' style guard around the loop below
            # is missing from this dump.
            for file_entry in new[pkg]["files"]:
                if files[file_entry].has_key("new"):
                    del files[file_entry]["new"]

    # Warnings — see the 'warn' parameter; the guard using it is missing
    # from this dump.
    for s in ['stable', 'oldstable']:
        if changes["suite"].has_key(s):
            print "WARNING: overrides will be added for %s!" % s
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
167 ################################################################################
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Resolve the section; a "section id" of -1 marks it invalid.
        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        # Resolve the priority; a "priority id" of -1 marks it invalid.
        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa.
        # BUGFIX: the original compared 'priority' (the ORM object returned
        # by get_priority(), or None) against the string "source", which can
        # never be equal — so this check was dead code.  Compare the
        # priority *name* instead.
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
211 ###############################################################################
def lookup_uid_from_fingerprint(fpr, session):
    """
    Look up (uid, uid_name, is_dm) for the key fingerprint C{fpr}.

    NOTE(review): this dump omits several lines (the defaults for
    uid/uid_name/is_dm and the handling when no user is found); comments
    below cover only the visible code.
    """
    # This is a stupid default, but see the comments below
    user = get_uid_from_fingerprint(fpr, session)

    # NOTE(review): the body of this branch (presumably falling back to an
    # empty/derived name) is missing from this dump.
    if user.name is None:

    # Check the relevant fingerprint (which we have to have)
    for f in user.fingerprint:
        if f.fingerprint == fpr:
            # Whether the key sits on a Debian-Maintainer keyring.
            is_dm = f.keyring.debian_maintainer

    return (uid, uid_name, is_dm)
236 ###############################################################################
# Used by Upload.check_timestamps
class TarTime(object):
    # Tar-extraction callback holder: records archive members whose
    # modification time falls outside the (past_cutoff, future_cutoff)
    # window.
    # NOTE(review): the dump skips lines between the attribute
    # initialisations below — a small helper method may be missing here;
    # confirm against VCS.
    def __init__(self, future_cutoff, past_cutoff):
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        # Signature matches the apt_inst per-member extraction callback;
        # only Name and MTime are consulted here.
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
255 ###############################################################################
class Upload(object):
    """
    Everything that has to do with an upload processed.
    """

    ###########################################################################
    # NOTE(review): the 'def' line of the following method is missing from
    # this dump; per its docstring it is the internal-state reset helper.
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        # NOTE(review): the binding of 'cnf' (presumably Config()) is
        # missing from this dump.
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
    def package_info(self):
        """
        Build a human-readable report of the accumulated reject reasons,
        warnings and notes for this upload.

        NOTE(review): the initialisation and return of the accumulator
        (and the header strings for the warnings/notes sections) are
        missing from this dump.
        """
        if len(self.rejects) > 0:
            msg += "Reject Reasons:\n"
            msg += "\n".join(self.rejects)

        if len(self.warnings) > 0:
            msg += "\n".join(self.warnings)

        if len(self.notes) > 0:
            msg += "\n".join(self.notes)
303 ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """
        # NOTE(review): the binding of 'cnf' (presumably Config()) is
        # missing from this dump.

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], DictType):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        # NOTE(review): the 'else:' line for the branch below is missing
        # from this dump.
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Sponsored uploads also get a copy to the sponsor.
        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        # Bcc the package-tracking service if configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
350 ###########################################################################
    def load_changes(self, filename):
        """
        Parse C{filename} as a .changes file into self.pkg.

        @rvalue: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.

        NOTE(review): the 'try:' lines and the intermediate 'return False'
        statements of this method are missing from this dump; the bare
        except-handler lines below belong to them.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))

        # Parse the Files field from the .changes into another dictionary
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
                del self.pkg.changes[i]
            self.pkg.changes[i] = {}
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            # NOTE(review): 'changes' is not defined in this scope —
            # presumably self.pkg.changes was meant; as written this
            # handler raises NameError when it fires.
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            # NOTE(review): same unbound 'changes' problem as above.
            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check there isn't already a changes file of the same name in one
        # of the queue directories.
        base_filename = os.path.basename(filename)
        for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
            if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
                self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))

        # Changes was syntactically valid even if we'll reject
464 ###########################################################################
    def check_distributions(self):
        "Check and map the Distribution field"
        # NOTE(review): the splitting of each mapping 'm' into 'args'/'mtype'
        # and several 'continue' lines are missing from this dump.

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        # NOTE(review): 'arch_string' is unbound here — the
                        # comprehension presumably meant 'a.arch_string';
                        # as written this raises NameError.
                        if arch not in [ arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
            elif mtype == "ignore":
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
521 ###########################################################################
    def binary_file_checks(self, f, session):
        """
        Run the per-file sanity checks for the binary package C{f},
        appending to self.rejects / self.warnings as appropriate.

        NOTE(review): many 'try:'/'except'/'return' lines of this method
        are missing from this dump; the bare reject-append lines below are
        the bodies of those handlers.
        """
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            # Can't continue, none of the checks on control would work.

        # Check for mandantory "Description:"
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
            self.rejects.append("%s: Missing Description in binary package" % (f))

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                self.rejects.append("%s: No %s field in control." % (f, field))

        # Ensure the package name matches the one give in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
            provide = re_spacestrip.sub('', provides)
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
            self.rejects.append("%s is neither a .deb or a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                   (source_version, f, self.pkg.changes["version"]))

            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # NOTE(review): the construction of 'b' (a Binary scan of f) is
        # missing from this dump.
        if len(b.rejects) > 0:
                self.rejects.append(j)
    def source_file_checks(self, f, session):
        """
        Run the per-file sanity checks for the source file C{f},
        appending to self.rejects as appropriate.

        NOTE(review): the 'if not m: return' guard and a few other lines
        are missing from this dump.
        """
        entry = self.pkg.files[f]

        m = re_issource.match(f)

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source filed in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if entry["type"] == "orig.tar.gz":
            changes_version = self.pkg.changes["chopversion2"]
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        """
        Checks for file C{f} that depend on the target C{suite}:
        component mapping/validation, NEW detection, priority syntax,
        pool location and existing-copy comparison.

        NOTE(review): several guard/'return' lines are missing from this
        dump.
        """
        entry = self.pkg.files[f]
        archive = utils.where_am_i()

        # Skip byhand
        if entry.has_key("byhand"):

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        # NOTE(review): "Suite:%s::Components" has a single colon after
        # "Suite" (next line uses "Suite::%s::Components"), so this
        # has_key() lookup can never match — the per-suite component
        # restriction is effectively disabled.
        if cnf.has_key("Suite:%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))

        # Validate the component
        component = entry["component"]
        if not get_component(component, session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, component))

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], archive, session)
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
            entry["location id"] = -1
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
            entry["files id"] = None
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        """
        Copy the upload's files to holding (when C{action}) and run all
        per-file and per-suite checks over them.

        NOTE(review): the Holding() setup, the 'for f in file_keys' loop
        header for copy_to_holding, the has_binaries/has_source tracking
        and several 'continue' lines are missing from this dump.
        """
        archive = utils.where_am_i()
        file_keys = self.pkg.files.keys()

        # XXX: As far as I can tell, this can no longer happen - see
        # comments by AJ in old revisions - mhy
        # if reprocess is 2 we've already done this and we're checking
        # things again for the new .orig.tar.gz.
        # [Yes, I'm fully aware of how disgusting this is]
        if action and self.reprocess < 2:
            os.chdir(self.pkg.directory)
                ret = holding.copy_to_holding(f)
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

        # Check there isn't already a .changes or .dak file of the same name in
        # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)
        dot_dak_filename = base_filename[:-8] + ".dak"

        for suite in self.pkg.changes["distribution"].keys():
            copychanges = "Suite::%s::CopyChanges" % (suite)
            if cnf.has_key(copychanges) and \
                   os.path.exists(os.path.join(cnf[copychanges], base_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                           % (base_filename, cnf[copychanges]))

            copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
            if cnf.has_key(copy_dot_dak) and \
                   os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
                # NOTE(review): 'Cnf' here is inconsistent with the 'cnf'
                # used two lines up; confirm which binding is in scope.
                self.rejects.append("%s: a file with this name already exists in %s" \
                           % (dot_dak_filename, Cnf[copy_dot_dak]))

        session = DBConn().session()

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if os.path.exists(f):
                    self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
910 ###########################################################################
911 def check_dsc(self, action=True):
912 """Returns bool indicating whether or not the source changes are valid"""
913 # Ensure there is source to check
914 if not self.pkg.changes["architecture"].has_key("source"):
# Find the .dsc entry among the upload's files; more than one is an error.
919 for f, entry in self.pkg.files.items():
920 if entry["type"] == "dsc":
922 self.rejects.append("can not process a .changes file with multiple .dsc's.")
927 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
929 self.rejects.append("source uploads must contain a dsc file")
932 # Parse the .dsc file
# signing_rules=1: require the .dsc to be inline-signed (see utils.parse_changes).
934 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
935 except CantOpenError:
936 # if not -n copy_to_holding() will have done this for us...
938 self.rejects.append("%s: can't read file." % (dsc_filename))
939 except ParseChangesError, line:
940 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
941 except InvalidDscError, line:
942 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
943 except ChangesUnicodeError:
944 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
946 # Build up the file list of files mentioned by the .dsc
948 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
949 except NoFilesFieldError:
950 self.rejects.append("%s: no Files: field." % (dsc_filename))
952 except UnknownFormatError, format:
953 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
955 except ParseChangesError, line:
956 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
959 # Enforce mandatory fields
960 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
961 if not self.pkg.dsc.has_key(i):
962 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
965 # Validate the source and version fields
966 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
967 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
968 if not re_valid_version.match(self.pkg.dsc["version"]):
969 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
971 # Bumping the version number of the .dsc breaks extraction by stable's
972 # dpkg-source. So let's not do that...
973 if self.pkg.dsc["format"] != "1.0":
974 self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
976 # Validate the Maintainer field
978 # We ignore the return value
# fix_maintainer() is used purely for its parse side effect; a failure to
# parse raises ParseMaintError, which we turn into a reject.
979 fix_maintainer(self.pkg.dsc["maintainer"])
980 except ParseMaintError, msg:
981 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
982 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
984 # Validate the build-depends field(s)
985 for field_name in [ "build-depends", "build-depends-indep" ]:
986 field = self.pkg.dsc.get(field_name)
988 # Check for broken dpkg-dev lossage...
# Broken dpkg-dev 1.10.11 emitted a stringified Perl ARRAY ref here.
989 if field.startswith("ARRAY"):
990 self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
991 (dsc_filename, field_name.title()))
993 # Have apt try to parse them...
995 apt_pkg.ParseSrcDepends(field)
997 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
999 # Ensure the version number in the .dsc matches the version number in the .changes
1000 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1001 changes_version = self.pkg.files[dsc_filename]["version"]
1003 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1004 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1006 # Ensure there is a .tar.gz in the .dsc file
1008 for f in self.pkg.dsc_files.keys():
1009 m = re_issource.match(f)
1011 self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
# ftype comes from the re_issource match groups (set on an elided line).
1014 if ftype == "orig.tar.gz" or ftype == "tar.gz":
1018 self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
1020 # Ensure source is newer than existing source in target suites
1021 session = DBConn().session()
1022 self.check_source_against_db(dsc_filename, session)
1023 self.check_dsc_against_db(dsc_filename, session)
1028 ###########################################################################
1030 def get_changelog_versions(self, source_dir):
1031 """Extracts a the source package and (optionally) grabs the
1032 version history out of debian/changelog for the BTS."""
1036 # Find the .dsc (again)
1038 for f in self.pkg.files.keys():
1039 if self.pkg.files[f]["type"] == "dsc":
1042 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1043 if not dsc_filename:
1046 # Create a symlink mirror of the source files in our temporary directory
1047 for f in self.pkg.files.keys():
1048 m = re_issource.match(f)
1050 src = os.path.join(source_dir, f)
1051 # If a file is missing for whatever reason, give up.
1052 if not os.path.exists(src):
1055 if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
1057 dest = os.path.join(os.getcwd(), f)
1058 os.symlink(src, dest)
1060 # If the orig.tar.gz is not a part of the upload, create a symlink to the
1062 if self.pkg.orig_tar_gz:
1063 dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
1064 os.symlink(self.pkg.orig_tar_gz, dest)
1066 # Extract the source
1067 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1068 (result, output) = commands.getstatusoutput(cmd)
1070 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1071 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
1074 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1077 # Get the upstream version
1078 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1079 if re_strip_revision.search(upstr_version):
1080 upstr_version = re_strip_revision.sub('', upstr_version)
1082 # Ensure the changelog file exists
1083 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1084 if not os.path.exists(changelog_filename):
1085 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1088 # Parse the changelog
1089 self.pkg.dsc["bts changelog"] = ""
1090 changelog_file = utils.open_file(changelog_filename)
1091 for line in changelog_file.readlines():
1092 m = re_changelog_versions.match(line)
1094 self.pkg.dsc["bts changelog"] += line
1095 changelog_file.close()
1097 # Check we found at least one revision in the changelog
1098 if not self.pkg.dsc["bts changelog"]:
1099 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1101 def check_source(self):
# Extract the source package in a scratch directory so that
# get_changelog_versions() can harvest debian/changelog for the BTS.
1102 # XXX: I'm fairly sure reprocess == 2 can never happen
1103 # AJT disabled the is_incoming check years ago - mhy
1104 # We should probably scrap or rethink the whole reprocess thing
1106 # a) there's no source
1107 # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
1108 # or c) the orig.tar.gz is MIA
1109 if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
1110 or self.pkg.orig_tar_gz == -1:
1113 tmpdir = utils.temp_dirname()
1115 # Move into the temporary directory
1119 # Get the changelog version history
1120 self.get_changelog_versions(cwd)
1122 # Move back and cleanup the temporary tree
1126 shutil.rmtree(tmpdir)
1128 if e.errno != errno.EACCES:
1130 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1132 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1133 # We probably have u-r or u-w directories so chmod everything
# EACCES here usually means dpkg-source left unreadable/unwritable dirs;
# force permissions open and retry the removal once.
1135 cmd = "chmod -R u+rwx %s" % (tmpdir)
1136 result = os.system(cmd)
1138 utils.fubar("'%s' failed with result %s." % (cmd, result))
1139 shutil.rmtree(tmpdir)
1140 except Exception, e:
# Last-ditch catch-all: anything else while cleaning up is fatal.
1141 print "foobar2 (%s)" % e
1142 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1144 ###########################################################################
1145 def ensure_hashes(self):
# Validate/compute the per-algorithm checksum fields (Checksums-Sha1,
# Checksums-Sha256, ...) for both the .changes and the .dsc, appending
# any problems to self.rejects.
1146 # Make sure we recognise the format of the Files: field in the .changes
# Normalise the Format: value into a (major, minor) int tuple; default to
# "0.0" when the field is absent.
1147 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1148 if len(format) == 2:
1149 format = int(format[0]), int(format[1])
1151 format = int(float(format[0])), 0
1153 # We need to deal with the original changes blob, as the fields we need
1154 # might not be in the changes dict serialised into the .dak anymore.
1155 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1157 # Copy the checksums over to the current changes dict. This will keep
1158 # the existing modifications to it intact.
1159 for field in orig_changes:
1160 if field.startswith('checksums-'):
1161 self.pkg.changes[field] = orig_changes[field]
1163 # Check for unsupported hashes
1164 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1165 self.rejects.append(j)
1167 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1168 self.rejects.append(j)
1170 # We have to calculate the hash if we have an earlier changes version than
1171 # the hash appears in rather than require it exist in the changes file
1172 for hashname, hashfunc, version in utils.known_hashes:
1173 # TODO: Move _ensure_changes_hash into this class
1174 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1175 self.rejects.append(j)
# The .dsc only needs checking for sourceful uploads.
1176 if "source" in self.pkg.changes["architecture"]:
1177 # TODO: Move _ensure_dsc_hash into this class
1178 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1179 self.rejects.append(j)
def check_hashes(self):
    """Verify the md5sums and sizes declared for the upload.

    Both file lists are checked — the .changes' Files entries and the
    .dsc's Files entries — and every discrepancy is appended to
    self.rejects.  Finally ensure_hashes() is invoked to validate the
    remaining known checksum algorithms.
    """
    for tag, filelist in ((".changes", self.pkg.files),
                          (".dsc", self.pkg.dsc_files)):
        for problem in utils.check_hash(tag, filelist, "md5", apt_pkg.md5sum):
            self.rejects.append(problem)
        for problem in utils.check_size(tag, filelist):
            self.rejects.append(problem)

    self.ensure_hashes()
1196 ###########################################################################
1197 def check_urgency(self):
# Normalise the Urgency field of sourceful uploads: fill in the configured
# default when missing, lower-case it, and fall back to the default (with a
# warning, not a reject) when the value is not in Urgency::Valid.
1199 if self.pkg.changes["architecture"].has_key("source"):
1200 if not self.pkg.changes.has_key("urgency"):
1201 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1202 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1203 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1204 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1205 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1206 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1208 ###########################################################################
1210 # Sanity check the time stamps of files inside debs.
1211 # [Files in the near future cause ugly warnings and extreme time
1212 # travel can cause errors on extraction]
1214 def check_timestamps(self):
# Reject debs whose members carry timestamps in the (near) future or
# before the configured past cutoff year; extreme timestamps break or
# spam extraction tools.
1217 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1218 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
# TarTime records, via its callback, every member outside the window.
1219 tar = TarTime(future_cutoff, past_cutoff)
1221 for filename, entry in self.pkg.files.items():
1222 if entry["type"] == "deb":
1225 deb_file = utils.open_file(filename)
1226 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1229 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1230 except SystemError, e:
1231 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
# apt_inst spells the error inconsistently ("find"/"fund"), hence f[ui]nd.
1232 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1235 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1239 future_files = tar.future_files.keys()
# Only the first offending file is named in the reject message.
1241 num_future_files = len(future_files)
1242 future_file = future_files[0]
1243 future_date = tar.future_files[future_file]
1244 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1245 % (filename, num_future_files, future_file, time.ctime(future_date)))
1247 ancient_files = tar.ancient_files.keys()
1249 num_ancient_files = len(ancient_files)
1250 ancient_file = ancient_files[0]
1251 ancient_date = tar.ancient_files[ancient_file]
1252 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1253 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1255 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1257 ###########################################################################
1258 def check_signed_by_key(self):
1259 """Ensure the .changes is signed by an authorized uploader."""
1260 session = DBConn().session()
1262 (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
1264 # match claimed name with actual name:
1266 # This is fundamentally broken but need us to refactor how we get
1267 # the UIDs/Fingerprints in order for us to fix it properly
# When no uid is known for the fingerprint, fall back to using the
# fingerprint itself as the identifier and allow NMU/sponsorship.
1268 uid, uid_email = self.pkg.changes["fingerprint"], uid
1269 may_nmu, may_sponsor = 1, 1
1270 # XXX by default new dds don't have a fingerprint/uid in the db atm,
1271 # and can't get one in there if we don't allow nmu/sponsorship
1272 elif is_dm is False:
1273 # If is_dm is False, we allow full upload rights
1274 uid_email = "%s@debian.org" % (uid)
1275 may_nmu, may_sponsor = 1, 1
1277 # Assume limited upload rights unless we've discovered otherwise
1279 may_nmu, may_sponsor = 0, 0
# An upload is "sponsored" when neither the signer's email nor name
# matches the Maintainer/Changed-By fields of the .changes.
1281 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1283 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1285 if uid_name == "": sponsored = 1
# Record the sponsor address for sourceful uploads signed with a known
# email alias, unless the sponsor is also the maintainer/changed-by.
1288 if ("source" in self.pkg.changes["architecture"] and
1289 uid_email and utils.is_email_alias(uid_email)):
1290 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1291 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1292 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1293 self.pkg.changes["sponsoremail"] = uid_email
1295 if sponsored and not may_sponsor:
1296 self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
# DM path: the signer must be listed in Maintainer/Uploaders of the most
# recent version of the source package that has DM-Upload-Allowed set.
1298 if not sponsored and not may_nmu:
1299 should_reject = True
1300 highest_sid, highest_version = None, None
1302 # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
1303 # It ignores higher versions with the dm_upload_allowed flag set to false
1304 # I'm keeping the existing behaviour for now until I've gone back and
1305 # checked exactly what the GR says - mhy
1306 for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
1307 if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
1308 highest_sid = si.source_id
1309 highest_version = si.version
1311 if highest_sid is None:
1312 self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
1314 for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
1315 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1316 if email == uid_email or name == uid_name:
1317 should_reject = False
1320 if should_reject is True:
1321 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
# DMs may not hijack binaries belonging to a different source package.
1323 for b in self.pkg.changes["binary"].keys():
1324 for suite in self.pkg.changes["distribution"].keys():
1325 q = session.query(DBSource)
1326 q = q.join(DBBinary).filter_by(package=b)
1327 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1330 if s.source != self.pkg.changes["source"]:
1331 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
# DMs may not upload BYHAND or NEW files either.
1333 for f in self.pkg.files.keys():
1334 if self.pkg.files[f].has_key("byhand"):
1335 self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
1336 if self.pkg.files[f].has_key("new"):
1337 self.rejects.append("%s may not upload NEW file %s" % (uid, f))
1341 ###########################################################################
1342 def build_summaries(self):
1343 """ Build a summary of changes the upload introduces. """
1345 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1347 short_summary = summary
1349 # This is for direport's benefit...
# Reformat blank lines in the Changes text to RFC822-continuation style.
1350 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1353 summary += "Changes: " + f
1355 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
# announce() with action=0 only returns the would-be announcement text.
1357 summary += self.announce(short_summary, 0)
1359 return (summary, short_summary)
1361 ###########################################################################
1363 def close_bugs(self, summary, action):
1365 Send mail to close bugs as instructed by the closes field in the changes file.
1366 Also add a line to summary if any work was done.
1368 @type summary: string
1369 @param summary: summary text, as given by L{build_summaries}
1372 @param action: Set to false no real action will be done.
1375 @return: summary. If action was taken, extended by the list of closed bugs.
1379 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1381 bugs = self.pkg.changes["closes"].keys()
1387 summary += "Closing bugs: "
# One templated mail is sent per bug number.
1389 summary += "%s " % (bug)
1391 self.Subst["__BUG_NUMBER__"] = bug
# Warn bug submitters when the closing upload is not going to stable.
1392 if self.pkg.changes["distribution"].has_key("stable"):
1393 self.Subst["__STABLE_WARNING__"] = """
1394 Note that this package is not part of the released stable Debian
1395 distribution. It may have dependencies on other unreleased software,
1396 or other instabilities. Please take care if you wish to install it.
1397 The update will eventually make its way into the next released Debian
1400 self.Subst["__STABLE_WARNING__"] = ""
1401 mail_message = utils.TemplateSubst(self.Subst, template)
1402 utils.send_mail(mail_message)
1404 # Clear up after ourselves
1405 del self.Subst["__BUG_NUMBER__"]
1406 del self.Subst["__STABLE_WARNING__"]
1408 if action and self.logger:
1409 self.logger.log(["closing bugs"] + bugs)
1415 ###########################################################################
1417 def announce(self, short_summary, action):
1419 Send an announce mail about a new upload.
1421 @type short_summary: string
1422 @param short_summary: Short summary text to include in the mail
1425 @param action: Set to false no real action will be done.
1428 @return: Textstring about action taken.
1433 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1435 # Only do announcements for source uploads with a recent dpkg-dev installed
1436 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1437 self.pkg.changes["architecture"].has_key("source"):
1443 self.Subst["__SHORT_SUMMARY__"] = short_summary
# Announce once per configured list, de-duplicating across suites.
1445 for dist in self.pkg.changes["distribution"].keys():
1446 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1447 if announce_list == "" or lists_done.has_key(announce_list):
1450 lists_done[announce_list] = 1
1451 summary += "Announcing to %s\n" % (announce_list)
1454 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
# Bcc the PTS-style tracking server for sourceful uploads if configured.
1455 if cnf.get("Dinstall::TrackingServer") and \
1456 self.pkg.changes["architecture"].has_key("source"):
1457 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1458 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1460 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1461 utils.send_mail(mail_message)
1463 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1465 if cnf.FindB("Dinstall::CloseBugs"):
1466 summary = self.close_bugs(summary, action)
1468 del self.Subst["__SHORT_SUMMARY__"]
1472 ###########################################################################
1474 def accept (self, summary, short_summary, targetdir=None):
1478 This moves all files referenced from the .changes into the I{accepted}
1479 queue, sends the accepted mail, announces to lists, closes bugs and
1480 also checks for override disparities. If enabled it will write out
1481 the version history for the BTS Version Tracking and will finally call
1484 @type summary: string
1485 @param summary: Summary text
1487 @type short_summary: string
1488 @param short_summary: Short summary
1493 stats = SummaryStats()
1495 accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
# targetdir defaults to the configured accepted queue directory.
1497 if targetdir is None:
1498 targetdir = cnf["Dir::Queue::Accepted"]
1502 self.logger.log(["Accepting changes", self.pkg.changes_file])
# Serialise our in-memory state as a .dak file next to the upload.
1504 self.pkg.write_dot_dak(targetdir)
1506 # Move all the files into the accepted directory
1507 utils.move(self.pkg.changes_file, targetdir)
1509 for name, entry in sorted(self.pkg.files.items()):
1510 utils.move(name, targetdir)
1511 stats.accept_bytes += float(entry["size"])
1513 stats.accept_count += 1
1515 # Send accept mail, announce to lists, close bugs and check for
1516 # override disparities
1517 if not cnf["Dinstall::Options::No-Mail"]:
1518 self.Subst["__SUITE__"] = ""
1519 self.Subst["__SUMMARY__"] = summary
1520 mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
1521 utils.send_mail(mail_message)
1522 self.announce(short_summary, 1)
1524 ## Helper stuff for DebBugs Version Tracking
1525 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1526 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1527 # the conditionalization on dsc["bts changelog"] should be
1530 # Write out the version history from the changelog
1531 if self.pkg.changes["architecture"].has_key("source") and \
1532 self.pkg.dsc.has_key("bts changelog"):
# Write to a dot-prefixed temp file first, then rename into place so the
# consumer never sees a partially-written .versions file.
1534 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1535 version_history = os.fdopen(fd, 'w')
1536 version_history.write(self.pkg.dsc["bts changelog"])
1537 version_history.close()
1538 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1539 self.pkg.changes_file[:-8]+".versions")
1540 os.rename(temp_filename, filename)
1541 os.chmod(filename, 0644)
1543 # Write out the binary -> source mapping.
1544 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1545 debinfo = os.fdopen(fd, 'w')
1546 for name, entry in sorted(self.pkg.files.items()):
1547 if entry["type"] == "deb":
1548 line = " ".join([entry["package"], entry["version"],
1549 entry["architecture"], entry["source package"],
1550 entry["source version"]])
1551 debinfo.write(line+"\n")
1553 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1554 self.pkg.changes_file[:-8]+".debinfo")
1555 os.rename(temp_filename, filename)
1556 os.chmod(filename, 0644)
1558 # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
1559 # <Ganneff> we do call queue_build too
1560 # <mhy> well yes, we'd have had to if we were inserting into accepted
1561 # <Ganneff> now. thats database only.
1562 # <mhy> urgh, that's going to get messy
1563 # <Ganneff> so i make the p-n call to it *also* using accepted/
1564 # <mhy> but then the packages will be in the queue_build table without the files being there
1565 # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
1566 # <mhy> ah, good point
1567 # <Ganneff> so it will work out, as unchecked move it over
1568 # <mhy> that's all completely sick
1571 # This routine returns None on success or an error on failure
1572 res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
1577 def check_override(self):
1579 Checks override entries for validity. Mails "Override disparity" warnings,
1580 if that feature is enabled.
1582 Abandons the check if
1583 - override disparity checks are disabled
1584 - mail sending is disabled
1589 # Abandon the check if:
1590 # a) override disparity checks have been disabled
1591 # b) we're not sending mail
1592 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1593 cnf["Dinstall::Options::No-Mail"]:
# pkg.check_override() returns the human-readable disparity summary.
1596 summary = self.pkg.check_override()
1601 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1603 self.Subst["__SUMMARY__"] = summary
1604 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1605 utils.send_mail(mail_message)
# Clean the substitution dict so later mails don't inherit this summary.
1606 del self.Subst["__SUMMARY__"]
1608 ###########################################################################
1610 def remove(self, dir=None):
1612 Used (for instance) in p-u to remove the package from unchecked
# Deletes the upload's files and its .changes from the package directory.
1615 os.chdir(self.pkg.directory)
1619 for f in self.pkg.files.keys():
1621 os.unlink(self.pkg.changes_file)
1623 ###########################################################################
1625 def move_to_dir (self, dest, perms=0660, changesperms=0664):
1627 Move files to dest with certain perms/changesperms
1629 utils.move(self.pkg.changes_file, dest, perms=changesperms)
1630 for f in self.pkg.files.keys():
1631 utils.move(f, dest, perms=perms)
1633 ###########################################################################
1635 def force_reject(self, reject_files):
1637 Forcefully move files from the current directory to the
1638 reject directory. If any file already exists in the reject
1639 directory it will be moved to the morgue to make way for
1643 @param files: file dictionary
1649 for file_entry in reject_files:
1650 # Skip any files which don't exist or which we don't have permission to copy.
1651 if os.access(file_entry, os.R_OK) == 0:
1654 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
# O_EXCL makes the open fail with EEXIST if someone already owns the name,
# which is how we detect (and defuse) a potential symlink/race attack.
1657 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
1659 # File exists? Let's try and move it to the morgue
1660 if e.errno == errno.EEXIST:
1661 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
1663 morgue_file = utils.find_next_free(morgue_file)
1664 except NoFreeFilenameError:
1665 # Something's either gone badly Pete Tong, or
1666 # someone is trying to exploit us.
1667 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
1669 utils.move(dest_file, morgue_file, perms=0660)
# Retry claiming the now-vacated name; give up with a warning on failure.
1671 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1674 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
1678 # If we got here, we own the destination file, so we can
1679 # safely overwrite it.
1680 utils.move(file_entry, dest_file, 1, perms=0660)
1683 ###########################################################################
1684 def do_reject (self, manual=0, reject_message="", note=""):
1686 Reject an upload. If called without a reject message or C{manual} is
1687 true, spawn an editor so the user can write one.
1690 @param manual: manual or automated rejection
1692 @type reject_message: string
1693 @param reject_message: A reject message
1698 # If we weren't given a manual rejection message, spawn an
1699 # editor so the user can add one in...
1700 if manual and not reject_message:
1701 (fd, temp_filename) = utils.temp_filename()
1702 temp_file = os.fdopen(fd, 'w')
1705 temp_file.write(line)
1707 editor = os.environ.get("EDITOR","vi")
# Loop until the operator picks something other than (E)dit.
1709 while answer == 'E':
1710 os.system("%s %s" % (editor, temp_filename))
1711 temp_fh = utils.open_file(temp_filename)
1712 reject_message = "".join(temp_fh.readlines())
1714 print "Reject message:"
1715 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
1716 prompt = "[R]eject, Edit, Abandon, Quit ?"
1718 while prompt.find(answer) == -1:
1719 answer = utils.our_raw_input(prompt)
1720 m = re_default_answer.search(prompt)
1723 answer = answer[:1].upper()
1724 os.unlink(temp_filename)
1730 print "Rejecting.\n"
# The .reason file records why the upload was rejected, alongside the
# rejected files in the reject queue.
1734 reason_filename = self.pkg.changes_file[:-8] + ".reason"
1735 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
1737 # Move all the files into the reject directory
1738 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
1739 self.force_reject(reject_files)
1741 # If we fail here someone is probably trying to exploit the race
1742 # so let's just raise an exception ...
1743 if os.path.exists(reason_filename):
1744 os.unlink(reason_filename)
1745 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1747 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
# Automated rejection: canned rejector address, message written verbatim.
1750 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
1751 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
1752 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
1753 os.write(reason_fd, reject_message)
1754 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1756 # Build up the rejection email
# Manual rejection: attribute the mail to the operator running dak.
1757 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
1758 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
1759 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
1760 self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
1761 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1762 # Write the rejection email out as the <foo>.reason file
1763 os.write(reason_fd, reject_mail_message)
1765 del self.Subst["__REJECTOR_ADDRESS__"]
1766 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
1767 del self.Subst["__CC__"]
1771 # Send the rejection mail if appropriate
1772 if not cnf["Dinstall::Options::No-Mail"]:
1773 utils.send_mail(reject_mail_message)
1776 self.logger.log(["rejected", self.pkg.changes_file])
1780 ################################################################################
1781 def in_override_p(self, package, component, suite, binary_type, file, session):
1783 Check if a package already has override entries in the DB
1785 @type package: string
1786 @param package: package name
1788 @type component: string
1789 @param component: database id of the component
1792 @param suite: database id of the suite
1794 @type binary_type: string
1795 @param binary_type: type of the package
1798 @param file: filename we check
1800 @return: the database result. But noone cares anyway.
1806 if binary_type == "": # must be source
1809 file_type = binary_type
1811 # Override suite name; used for example with proposed-updates
1812 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
1813 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
1815 result = get_override(package, suite, component, file_type, session)
1817 # If checking for a source package fall back on the binary override type
1818 if file_type == "dsc" and len(result) < 1:
1819 result = get_override(package, suite, component, ['deb', 'udeb'], session)
1821 # Remember the section and priority so we can check them later if appropriate
# Stashed on the file entry for the later override-disparity check.
1824 self.pkg.files[file]["override section"] = result.section.section
1825 self.pkg.files[file]["override priority"] = result.priority.priority
1830 ################################################################################
1831 def get_anyversion(self, sv_list, suite):
1834 @param sv_list: list of (suite, version) tuples to check
1837 @param suite: suite name
# Consider the target suite plus every suite it is configured to enhance.
1843 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
1844 for (s, v) in sv_list:
1845 if s in [ x.lower() for x in anysuite ]:
# Keep the highest version seen across the considered suites.
1846 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
1851 ################################################################################
1853 def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
1856 @param sv_list: list of (suite, version) tuples to check
1861 @type new_version: string
1862 @param new_version: XXX
1864 Ensure versions are newer than existing packages in target
1865 suites and that cross-suite version checking rules as
1866 set out in the conf file are satisfied.
1871 # Check versions for each target suite
1872 for target_suite in self.pkg.changes["distribution"].keys():
1873 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
1874 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
1876 # Enforce "must be newer than target suite" even if conffile omits it
1877 if target_suite not in must_be_newer_than:
1878 must_be_newer_than.append(target_suite)
1880 for (suite, existent_version) in sv_list:
1881 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
1883 if suite in must_be_newer_than and sourceful and vercmp < 1:
1884 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1886 if suite in must_be_older_than and vercmp > -1:
1889 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
1890 # we really use the other suite, ignoring the conflicting one ...
1891 addsuite = self.pkg.changes["distribution-version"][suite]
1893 add_version = self.get_anyversion(sv_list, addsuite)
1894 target_version = self.get_anyversion(sv_list, target_suite)
1897 # not add_version can only happen if we map to a suite
1898 # that doesn't enhance the suite we're propup'ing from.
1899 # so "propup-ver x a b c; map a d" is a problem only if
1900 # d doesn't enhance a.
1902 # i think we could always propagate in this case, rather
1903 # than complaining. either way, this isn't a REJECT issue
1905 # And - we really should complain to the dorks who configured dak
1906 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
1907 self.pkg.changes.setdefault("propdistribution", {})
1908 self.pkg.changes["propdistribution"][addsuite] = 1
1910 elif not target_version:
1911 # not targets_version is true when the package is NEW
1912 # we could just stick with the "...old version..." REJECT
1913 # for this, I think.
1914 self.rejects.append("Won't propogate NEW packages.")
1915 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
1916 # propogation would be redundant. no need to reject though.
1917 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1919 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
1920 apt_pkg.VersionCompare(add_version, target_version) >= 0:
1922 self.warnings.append("Propogating upload to %s" % (addsuite))
1923 self.pkg.changes.setdefault("propdistribution", {})
1924 self.pkg.changes["propdistribution"][addsuite] = 1
1928 self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1930 ################################################################################
    def check_binary_against_db(self, file, session):
        """Cross-check one binary (.deb) entry against the archive database.

        Gathers every (suite, version) pair the archive already holds for this
        package name on this architecture (or 'all') and feeds them to
        cross_suite_version_check(); then looks for an identical
        package/version/architecture copy already in the archive.  Problems
        are appended to self.rejects rather than raised.

        @type file: string
        @param file: key into self.pkg.files for the binary being checked

        @type session: SQLA Session
        @param session: SQL Alchemy session object
        """
        # Ensure version is sane: every existing binary association with this
        # package name, restricted to this architecture or arch 'all'.
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))

        # sourceful=False: binary uploads get the relaxed "must be newer" rule.
        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       file, self.pkg.files[file]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
        q = q.filter_by(version=self.pkg.files[file]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])

        # NOTE(review): in this excerpt the reject is appended unconditionally;
        # presumably a "query matched something" guard (e.g. q.count() > 0) is
        # elided between the query and this line -- confirm against the full file.
        self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
1948 ################################################################################
1950 def check_source_against_db(self, file, session):
1953 source = self.pkg.dsc.get("source")
1954 version = self.pkg.dsc.get("version")
1956 # Ensure version is sane
1957 q = session.query(SrcAssociation)
1958 q = q.join(DBSource).filter(DBSource.source==source)
1960 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
1961 file, version, sourceful=True)
1963 ################################################################################
    def check_dsc_against_db(self, file, session):
        """Locate and validate every file referenced by the .dsc.

        Each entry in self.pkg.dsc_files is searched for first in incoming,
        then in the pool, then in the queue directories; its md5sum and size
        are compared against the .dsc.  When an .orig.tar.gz is found in the
        archive, self.pkg.orig_tar_gz / orig_tar_id / orig_tar_location are
        filled in for later use by install().

        @warning: NB: this function can remove entries from the 'files' index [if
           the .orig.tar.gz is a duplicate of the one in the archive]; if
           you're iterating over 'files' and call this function as part of
           the loop, be sure to add a check to the top of the loop to
           ensure you haven't just tried to dereference the deleted entry.
        """
        # NOTE(review): several control-flow lines (for-loop headers over the
        # pool query results, else branches, guards) are elided from this
        # listing; the indentation below is reconstructed and must be checked
        # against the complete file before trusting the nesting.
        self.pkg.orig_tar_gz = None

        # Try and find all files mentioned in the .dsc. This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            if self.pkg.files.has_key(dsc_name):
                # Easy case: the file is part of this very upload.
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # (loop over ql, binding `i`, elided in this excerpt)
                if not i.filename.endswith(dsc_name):
                # (body elided in this excerpt)

                # "[dak] has not broken them. [dak] has fixed a
                # brokenness. Your crappy hack exploited a bug in
                # "(Come on! I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #   -- ajk@ on d-devel@l.d.o

                # Ignore exact matches for .orig.tar.gz
                if dsc_name.endswith(".orig.tar.gz"):
                    # Duplicate of the archive's orig: drop it from this upload
                    # and point orig_tar_gz at the pool copy instead.
                    if self.pkg.files.has_key(dsc_name) and \
                       int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                       self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                        self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                        # TODO: Don't delete the entry, just mark it as not needed
                        # This would fix the stupidity of changing something we often iterate over
                        # whilst we're doing it
                        del self.pkg.files[dsc_name]
                        self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
                # (else / non-exact-match branch elided in this excerpt)
                self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

            elif dsc_name.endswith(".orig.tar.gz"):
                # Not in incoming: go hunting for the orig in the pool.
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                # (loop over ql, binding `i`, elided in this excerpt)
                if not i.filename.endswith(dsc_name):
                # (body elided in this excerpt)

                # Unfortunately, we may get more than one match here if,
                # for example, the package was in potato but had an -sa
                # upload in woody. So we need to choose the right one.

                # default to something sane in case we don't match any or have only one
                old_file = os.path.join(i.location.path, i.filename)
                old_file_fh = utils.open_file(old_file)
                actual_md5 = apt_pkg.md5sum(old_file_fh)
                actual_size = os.stat(old_file)[stat.ST_SIZE]
                # NOTE(review): old_file_fh is never closed in this excerpt --
                # possible fd leak unless a close is elided; confirm.
                if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                    # Re-read from the matching candidate (bound to `x` in
                    # elided lines) and record where the orig lives.
                    old_file = os.path.join(i.location.path, i.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    self.pkg.orig_tar_id = x.file_id
                    self.pkg.orig_tar_gz = old_file
                    self.pkg.orig_tar_location = x.location.location_id

                # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                # Not there? Check the queue directories...
                for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                    # Skip queue dirs that are not configured.
                    if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                    # (continue elided in this excerpt)
                    in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                    if os.path.exists(in_otherdir):
                        in_otherdir_fh = utils.open_file(in_otherdir)
                        actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                        in_otherdir_fh.close()
                        actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                        self.pkg.orig_tar_gz = in_otherdir

                    # Not found anywhere: reject, and mark orig_tar_gz with the
                    # -1 sentinel so later code knows the search failed.
                    self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
                    self.pkg.orig_tar_gz = -1

            # (else branch header elided: non-orig file missing from incoming)
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))

            # Whatever copy we settled on must match the .dsc's checksums.
            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, file))
2094 ################################################################################
    def accepted_checks(self, overwrite_checks, session):
        """Re-run database-dependent checks at install time.

        Recheck anything that relies on the database; since that's not
        frozen between accept and our run time when called from
        process-accepted.

        @param overwrite_checks: set to False when installing to
            stable/oldstable, where replacing an existing version is expected.

        @type session: SQLA Session
        @param session: SQL Alchemy session object
        """
        # NOTE(review): several statements are elided from this listing
        # (e.g. the `dsc_filename = f` assignment and some loop bodies);
        # indentation below is reconstructed.

        # Find the .dsc (again)
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
            # (assignment of dsc_filename elided in this excerpt)

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us is it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
            # (continue elided in this excerpt)

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                # Binary-only uploads must reference a source package already
                # present in (one of) the target distributions.
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # propagate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propogate[suite] = 1
                # (else branch header elided in this excerpt)
                    nopropogate[suite] = 1

        # Suites vetoed by any file (nopropogate) win over suites approved by
        # another file (propogate).
        for suite in propogate.keys():
            if suite in nopropogate:
            # (continue elided in this excerpt)
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                # NOTE(review): `entry` here is left over from the previous
                # loop and is not reassigned for this `checkfile` -- it looks
                # like this should read self.pkg.files[checkfile]; confirm
                # against upstream before relying on this behaviour.
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2152 ################################################################################
2153 # This is not really a reject, but an unaccept, but since a) the code for
2154 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2155 # extremely rare, for now we'll go with whining at our admin folks...
    def do_unaccept(self):
        """Mail an "unaccept" notice and record the reason.

        Builds a rejection mail from the process-accepted.unaccept template,
        writes it out as <changes-basename>.reason in the reject queue, then
        sends it.  Per-call substitution keys are removed from self.Subst
        afterwards so a later mail cannot reuse stale values.
        """
        # NOTE(review): the assignment of `cnf` (presumably Config()) is
        # elided just above this excerpt.
        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
        if cnf.has_key("Dinstall::Bcc"):
            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")

        reject_mail_message = utils.TemplateSubst(self.Subst, template)

        # Write the rejection email out as the <foo>.reason file
        # [:-8] strips the ".changes" suffix (8 characters).
        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename)

        # O_EXCL: if the file reappears between the unlink above and this
        # open, the open raises instead of silently reusing it.
        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        os.write(fd, reject_mail_message)
        # (os.close(fd) appears to be elided from this excerpt -- confirm.)

        utils.send_mail(reject_mail_message)

        # Drop the per-call substitutions again.
        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__REJECT_MESSAGE__"]
        del self.Subst["__CC__"]
2190 ################################################################################
2191 # If any file of an upload has a recent mtime then chances are good
2192 # the file is still being uploaded.
2194 def upload_too_new(self):
2197 # Move back to the original directory to get accurate time stamps
2199 os.chdir(self.pkg.directory)
2200 file_list = self.pkg.files.keys()
2201 file_list.extend(self.pkg.dsc_files.keys())
2202 file_list.append(self.pkg.changes_file)
2205 last_modified = time.time()-os.path.getmtime(f)
2206 if last_modified < int(cnf["Dinstall::SkipTime"]):