5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
43 from dak_exceptions import *
46 from config import Config
47 from holding import Holding
49 from summarystats import SummaryStats
50 from utils import parse_changes
51 from textutils import fix_maintainer
52 from binary import Binary
54 ###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Determine the type
    if f.has_key("dbtype"):
        # BUGFIX: this used to read file["dbtype"], subscripting the
        # Python builtin `file` instead of the entry `f` (TypeError at
        # runtime whenever a dbtype was present).
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        # All source artefacts are handled under the "dsc" override type
        file_type = "dsc"
    else:
        # BUGFIX: report f["type"] here; `file_type` is unbound on this path
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))

    return file_type
85 ################################################################################
# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @param warn: Warn if overrides are added for (old)stable

    @return: dictionary of NEW components.
    """
    # NOTE(review): several lines of this function are not visible in this
    # excerpt (e.g. the initialisation of `new` and `pkg`, and the
    # `continue` after the byhand test) -- confirm against the full source.
    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            # (loop-continue elided in this excerpt)
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        # Source uploads are keyed off their "dsc" entry
        if file_type == "dsc":
            # (dsc-specific handling elided in this excerpt)

        # Seed the per-package record the first time this package is seen
        if not new.has_key(pkg):
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        old_type = new[pkg]["type"]
        if old_type != file_type:
            # source gets trumped by deb or udeb
            if old_type == "dsc":
                new[pkg]["priority"] = priority
                new[pkg]["section"] = section
                new[pkg]["type"] = file_type
                new[pkg]["component"] = component
        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Anything that already has an override in one of the target suites
    # is not NEW
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            # (handling of a found override elided in this excerpt)
            for file_entry in new[pkg]["files"]:
                if files[file_entry].has_key("new"):
                    del files[file_entry]["new"]

    # Warnings below are presumably gated on `warn` -- the guard line is
    # elided in this excerpt
    for s in ['stable', 'oldstable']:
        if changes["suite"].has_key(s):
            print "WARNING: overrides will be added for %s!" % s
    for pkg in new.keys():
        if new[pkg].has_key("othercomponents"):
            print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
167 ################################################################################
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        # Map the section name to its database id; -1 marks an unknown
        # section (the None-check branch headers are elided in this excerpt)
        section = get_section(section_name)
            new[pkg]["section id"] = -1
            new[pkg]["section id"] = section.section_id

        # Likewise for the priority
        priority = get_priority(priority_name)
            new[pkg]["priority id"] = -1
            new[pkg]["priority id"] = priority.priority_id

        # Is this a debian-installer section?
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # NOTE(review): `priority` here is the object returned by
        # get_priority(), not the name string; comparing it against
        # "source" looks wrong -- probably should be `priority_name`.
        # TODO confirm against the full source.
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
211 ###############################################################################
def lookup_uid_from_fingerprint(fpr, session):
    """
    Look up the uid belonging to key fingerprint C{fpr} and return a
    (uid, uid_name, is_dm) tuple, where is_dm records whether the key
    lives on a debian-maintainer keyring.
    (The assignments of uid/uid_name/is_dm defaults are elided in this
    excerpt.)
    """
    # This is a stupid default, but see the comments below
    user = get_uid_from_fingerprint(fpr, session)

    if user.name is None:
        # (handling of a uid without a name elided in this excerpt)

    # Check the relevant fingerprint (which we have to have)
    for f in user.fingerprint:
        if f.fingerprint == fpr:
            is_dm = f.keyring.debian_maintainer

    return (uid, uid_name, is_dm)
236 ###############################################################################
# Used by Upload.check_timestamps
class TarTime(object):
    """Tar-extraction callback that collects files whose mtimes fall
    outside an acceptable (past_cutoff, future_cutoff) window.

    Offending member names are recorded in future_files / ancient_files
    as name -> mtime mappings.
    """
    def __init__(self, future_cutoff, past_cutoff):
        # Upper and lower bounds of the acceptable timestamp window
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        # name -> mtime for members outside the window
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Record Name when MTime lies outside the window; only Name and
        MTime are inspected, the other arguments are part of the
        extraction-callback signature."""
        too_new = MTime > self.future_cutoff
        too_old = MTime < self.past_cutoff
        if too_new:
            self.future_files[Name] = MTime
        if too_old:
            self.ancient_files[Name] = MTime
255 ###############################################################################
class Upload(object):
    """
    Everything that has to do with an upload processed.
    """

    ###########################################################################
    # NOTE(review): the `def` line of the state-reset method below is not
    # visible in this excerpt; the statements belong to its body.
        """ Reset a number of internal variables."""

        # Initialize the substitution template map with the site-wide
        # defaults from the Dinstall configuration
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
    def package_info(self):
        """
        Assemble the accumulated reject reasons, warnings and notes for
        this upload into a single report string.
        (The initialisation of `msg`, the Warnings/Notes section headers
        and the return statement are elided in this excerpt.)
        """
        if len(self.rejects) > 0:
            msg += "Reject Reasons:\n"
            msg += "\n".join(self.rejects)

        if len(self.warnings) > 0:
            msg += "\n".join(self.warnings)

        if len(self.notes) > 0:
            msg += "\n".join(self.notes)
303 ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], DictType):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        # (the `else:` line of this branch is elided in this excerpt)
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # A sponsored upload also notifies the sponsor
        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        # Bcc the package-tracking service when one is configured
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
350 ###########################################################################
    def load_changes(self, filename):
        """
        Parse the .changes file C{filename} into self.pkg and run basic
        syntactic validation on it.

        @rvalue: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        # NOTE(review): the `try:` lines and several `return False`
        # statements of this method are not visible in this excerpt.
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))

        # Parse the Files field from the .changes into another dictionary
        self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            # NOTE(review): `changes` is not defined in this method -- this
            # looks like it should be self.pkg.changes["maintainer"]
            # (NameError on this error path); confirm and fix.
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            # NOTE(review): same undefined `changes` reference as above.
            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check there isn't already a changes file of the same name in one
        # of the queue directories.
        base_filename = os.path.basename(filename)
        for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
            # NOTE(review): uses `Cnf` while the rest of this class mostly
            # uses a local `cnf` -- confirm which is intended here.
            if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
                self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))

        # Changes was syntactically valid even if we'll reject
464 ###########################################################################
    def check_distributions(self):
        "Check and map the Distribution field"

        # NOTE(review): the lines splitting each mapping entry into
        # `args`/`mtype` (and the per-case extraction of `suite`) are not
        # visible in this excerpt.

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        # Remap only when the architecture is not carried
                        # by the source suite
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
            elif mtype == "ignore":
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
521 ###########################################################################
    def binary_file_checks(self, f, session):
        """
        Sanity-check the binary package C{f} (.deb/.udeb): extract and
        validate its control file, compare it to the .changes file and
        the archive database, appending to self.rejects/self.warnings.
        (Several `try:`/`else:`/`return` lines are elided in this excerpt.)
        """
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            # Can't continue, none of the checks on control would work.

        # Check for mandatory "Description:"
        apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
            self.rejects.append("%s: Missing Description in binary package" % (f))

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                self.rejects.append("%s: No %s field in control." % (f, field))

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
            and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
            provide = re_spacestrip.sub('', provides)
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
            and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
            and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        # Derive the database type from the filename extension
        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
            self.rejects.append("%s is neither a .deb or a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]

        # A "name (version)" Source field carries an explicit source version
        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
            # (the branch header for binary-only uploads is elided in
            # this excerpt)
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # NOTE(review): `b` is presumably a Binary object built from f;
        # its construction is elided in this excerpt.
        if len(b.rejects) > 0:
            self.rejects.append(j)
    def source_file_checks(self, f, session):
        """
        Sanity-check the source file C{f} (.dsc, .diff.gz, .tar.gz, ...)
        against the .changes file, appending to self.rejects as needed.
        """
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        # (the no-match early return is elided in this excerpt)

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if entry["type"] == "orig.tar.gz":
            changes_version = self.pkg.changes["chopversion2"]
            # (the `else:` line of this branch is elided in this excerpt)
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        """
        Per-suite checks for file C{f}: required fields, component
        mapping and validity, NEW status, priority, pool location and
        consistency with any existing pool copy.  Appends to self.rejects.
        """
        entry = self.pkg.files[f]
        archive = utils.where_am_i()

        # Skip byhand files
        if entry.has_key("byhand"):

        # Check we have fields we need to do these checks
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        # NOTE(review): the has_key() key is "Suite:%s::Components"
        # (single colon) while the ValueList below uses
        # "Suite::%s::Components" -- the guard likely never matches;
        # confirm and fix the key.
        if cnf.has_key("Suite:%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))

        # Validate the component
        if not get_component(entry["component"], session):
            # NOTE(review): `component` is undefined here; this looks
            # like it should be entry["component"].
            self.rejects.append("file '%s' has unknown component '%s'." % (f, component))

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], archive, session)
            # NOTE(review): `component` is undefined here as well.
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
            entry["location id"] = -1
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

            entry["files id"] = None
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        """
        Copy the upload's files into the holding area (when C{action})
        and run all per-file checks, dispatching each file to the
        binary / source / byhand handlers and then to the per-suite
        checks.  Appends to self.rejects/self.warnings.
        """
        archive = utils.where_am_i()
        file_keys = self.pkg.files.keys()

        # XXX: As far as I can tell, this can no longer happen - see
        # comments by AJ in old revisions - mhy
        # if reprocess is 2 we've already done this and we're checking
        # things again for the new .orig.tar.gz.
        # [Yes, I'm fully aware of how disgusting this is]
        if action and self.reprocess < 2:
            os.chdir(self.pkg.directory)
                ret = holding.copy_to_holding(f)
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

        # Check there isn't already a .changes or .dak file of the same name in
        # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)
        dot_dak_filename = base_filename[:-8] + ".dak"

        for suite in self.pkg.changes["distribution"].keys():
            copychanges = "Suite::%s::CopyChanges" % (suite)
            if cnf.has_key(copychanges) and \
               os.path.exists(os.path.join(cnf[copychanges], base_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (base_filename, cnf[copychanges]))

            copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
            if cnf.has_key(copy_dot_dak) and \
               os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
                # NOTE(review): `Cnf` below mixes case with the `cnf`
                # used two lines up -- confirm which is in scope.
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (dot_dak_filename, Cnf[copy_dot_dak]))

        session = DBConn().session()

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if os.path.exists(f):
                    self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
919 ###########################################################################
920 def check_dsc(self, action=True):
921 """Returns bool indicating whether or not the source changes are valid"""
# All problems are accumulated in self.rejects so the uploader gets a
# single consolidated report; nothing is raised from here.
922 # Ensure there is source to check
923 if not self.pkg.changes["architecture"].has_key("source"):
# Locate the .dsc entry among the uploaded files; more than one .dsc in
# a single .changes is a hard reject.
928 for f, entry in self.pkg.files.items():
929 if entry["type"] == "dsc":
931 self.rejects.append("can not process a .changes file with multiple .dsc's.")
936 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
938 self.rejects.append("source uploads must contain a dsc file")
941 # Parse the .dsc file
# signing_rules=1: the .dsc must carry an (already verified) inline
# signature layout for parse_changes to accept it.
943 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
944 except CantOpenError:
945 # if not -n copy_to_holding() will have done this for us...
947 self.rejects.append("%s: can't read file." % (dsc_filename))
948 except ParseChangesError, line:
949 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
950 except InvalidDscError, line:
951 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
952 except ChangesUnicodeError:
953 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
955 # Build up the file list of files mentioned by the .dsc
957 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
958 except NoFilesFieldError:
959 self.rejects.append("%s: no Files: field." % (dsc_filename))
961 except UnknownFormatError, format:
962 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
964 except ParseChangesError, line:
965 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
968 # Enforce mandatory fields
969 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
970 if not self.pkg.dsc.has_key(i):
971 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
974 # Validate the source and version fields
975 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
976 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
977 if not re_valid_version.match(self.pkg.dsc["version"]):
978 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
980 # Bumping the version number of the .dsc breaks extraction by stable's
981 # dpkg-source. So let's not do that...
982 if self.pkg.dsc["format"] != "1.0":
983 self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
985 # Validate the Maintainer field
987 # We ignore the return value
988 fix_maintainer(self.pkg.dsc["maintainer"])
989 except ParseMaintError, msg:
990 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
991 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
993 # Validate the build-depends field(s)
994 for field_name in [ "build-depends", "build-depends-indep" ]:
995 field = self.pkg.dsc.get(field_name)
997 # Check for broken dpkg-dev lossage...
# A literal "ARRAY" prefix means a Perl array ref got stringified into
# the field by the buggy dpkg-dev mentioned in the message below.
998 if field.startswith("ARRAY"):
999 self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
1000 (dsc_filename, field_name.title()))
1002 # Have apt try to parse them...
1004 apt_pkg.ParseSrcDepends(field)
1006 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1008 # Ensure the version number in the .dsc matches the version number in the .changes
# .changes versions never carry an epoch, so strip it from the .dsc side
# before comparing.
1009 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1010 changes_version = self.pkg.files[dsc_filename]["version"]
1012 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1013 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1015 # Ensure there is a .tar.gz in the .dsc file
1017 for f in self.pkg.dsc_files.keys():
1018 m = re_issource.match(f)
1020 self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
1023 if ftype == "orig.tar.gz" or ftype == "tar.gz":
1027 self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
1029 # Ensure source is newer than existing source in target suites
1030 session = DBConn().session()
1031 self.check_source_against_db(dsc_filename, session)
1032 self.check_dsc_against_db(dsc_filename, session)
1037 ###########################################################################
def get_changelog_versions(self, source_dir):
    """Extracts a the source package and (optionally) grabs the
    version history out of debian/changelog for the BTS.

    @type source_dir: string
    @param source_dir: directory the upload's files currently live in

    Failures are recorded in self.rejects; nothing is raised and
    nothing is returned.  Assumes the caller has already chdir()ed
    into a scratch directory (see check_source)."""

    cnf = Config()

    # Find the .dsc (again)
    dsc_filename = None
    for f in self.pkg.files.keys():
        if self.pkg.files[f]["type"] == "dsc":
            dsc_filename = f

    # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
    if not dsc_filename:
        return

    # Create a symlink mirror of the source files in our temporary directory
    for f in self.pkg.files.keys():
        m = re_issource.match(f)
        if m:
            src = os.path.join(source_dir, f)
            # If a file is missing for whatever reason, give up.
            if not os.path.exists(src):
                return
            ftype = m.group(3)
            # An orig.tar.gz pulled from the archive is symlinked
            # separately below, not from the upload directory.
            if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
                continue
            dest = os.path.join(os.getcwd(), f)
            os.symlink(src, dest)

    # If the orig.tar.gz is not a part of the upload, create a symlink to the
    # existing copy in the archive.
    if self.pkg.orig_tar_gz:
        dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
        os.symlink(self.pkg.orig_tar_gz, dest)

    # Extract the source
    cmd = "dpkg-source -sn -x %s" % (dsc_filename)
    (result, output) = commands.getstatusoutput(cmd)
    if (result != 0):
        self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
        # BUGFIX: this used to be append(..., "") — list.append() takes
        # exactly one argument, so the extraction-failure path itself
        # raised TypeError.  The stray "" was a leftover second argument
        # from the old reject(message, prefix) API.
        self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
        return

    # Version tracking is optional; bail out if it isn't configured.
    if not cnf.Find("Dir::Queue::BTSVersionTrack"):
        return

    # Get the upstream version
    upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
    if re_strip_revision.search(upstr_version):
        upstr_version = re_strip_revision.sub('', upstr_version)

    # Ensure the changelog file exists in the tree dpkg-source unpacked
    # (dpkg-source extracts into <source>-<upstream version>/).
    changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
    if not os.path.exists(changelog_filename):
        self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
        return

    # Parse the changelog: keep only the version-header lines for the BTS.
    self.pkg.dsc["bts changelog"] = ""
    changelog_file = utils.open_file(changelog_filename)
    for line in changelog_file.readlines():
        m = re_changelog_versions.match(line)
        if m:
            self.pkg.dsc["bts changelog"] += line
    changelog_file.close()

    # Check we found at least one revision in the changelog
    if not self.pkg.dsc["bts changelog"]:
        self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1110 def check_source(self):
# Drives get_changelog_versions() from inside a throw-away temp
# directory, then cleans that directory up, escalating through chmod
# -R u+rwx if the first rmtree fails with EACCES.
1111 # XXX: I'm fairly sure reprocess == 2 can never happen
1112 # AJT disabled the is_incoming check years ago - mhy
1113 # We should probably scrap or rethink the whole reprocess thing
1115 # a) there's no source
1116 # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
1117 # or c) the orig.tar.gz is MIA
# orig_tar_gz == -1 is the "MIA" sentinel referenced in (c) above.
1118 if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
1119 or self.pkg.orig_tar_gz == -1:
1122 tmpdir = utils.temp_dirname()
1124 # Move into the temporary directory
1128 # Get the changelog version history
1129 self.get_changelog_versions(cwd)
1131 # Move back and cleanup the temporary tree
1135 shutil.rmtree(tmpdir)
# EACCES usually means unreadable/unwritable dirs left by the
# extracted source; any other errno is fatal.
1137 if e.errno != errno.EACCES:
1139 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1141 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1142 # We probably have u-r or u-w directories so chmod everything
1144 cmd = "chmod -R u+rwx %s" % (tmpdir)
1145 result = os.system(cmd)
1147 utils.fubar("'%s' failed with result %s." % (cmd, result))
1148 shutil.rmtree(tmpdir)
1149 except Exception, e:
# NOTE(review): "foobar2" debug print looks like leftover scaffolding;
# consider routing through the logger instead.
1150 print "foobar2 (%s)" % e
1151 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1153 ###########################################################################
1154 def ensure_hashes(self):
# Validate/compute checksum fields for both the .changes and the .dsc,
# accumulating problems in self.rejects.
1155 # Make sure we recognise the format of the Files: field in the .changes
# "format" becomes an (int major, int minor) tuple, e.g. "1.8" -> (1, 8).
1156 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1157 if len(format) == 2:
1158 format = int(format[0]), int(format[1])
1160 format = int(float(format[0])), 0
1162 # We need to deal with the original changes blob, as the fields we need
1163 # might not be in the changes dict serialised into the .dak anymore.
1164 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1166 # Copy the checksums over to the current changes dict. This will keep
1167 # the existing modifications to it intact.
1168 for field in orig_changes:
1169 if field.startswith('checksums-'):
1170 self.pkg.changes[field] = orig_changes[field]
1172 # Check for unsupported hashes
1173 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1174 self.rejects.append(j)
1176 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1177 self.rejects.append(j)
1179 # We have to calculate the hash if we have an earlier changes version than
1180 # the hash appears in rather than require it exist in the changes file
1181 for hashname, hashfunc, version in utils.known_hashes:
1182 # TODO: Move _ensure_changes_hash into this class
1183 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1184 self.rejects.append(j)
# .dsc hashes only apply to sourceful uploads.
1185 if "source" in self.pkg.changes["architecture"]:
1186 # TODO: Move _ensure_dsc_hash into this class
1187 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1188 self.rejects.append(j)
def check_hashes(self):
    """
    Check md5sums and sizes of everything listed in the .changes and
    the .dsc against the files on disk, then run ensure_hashes() for
    the remaining checksum fields.  Problems go into self.rejects.
    """
    # Same four checks as before, just driven from a table:
    # (label, file dict) for the .changes and the .dsc respectively.
    for label, filedict in ((".changes", self.pkg.files),
                            (".dsc", self.pkg.dsc_files)):
        for problem in utils.check_hash(label, filedict, "md5", apt_pkg.md5sum):
            self.rejects.append(problem)
        for problem in utils.check_size(label, filedict):
            self.rejects.append(problem)

    self.ensure_hashes()
1205 ###########################################################################
def check_urgency(self):
    """
    Normalise the Urgency field of a sourceful upload: lower-case it,
    supply Urgency::Default when it is missing, and warn (then fall
    back to the default) when it is not listed in Urgency::Valid.
    """
    cnf = Config()

    # Urgency is only meaningful for sourceful uploads.
    if not self.pkg.changes["architecture"].has_key("source"):
        return

    urgency = self.pkg.changes.get("urgency", cnf["Urgency::Default"]).lower()
    if urgency not in cnf.ValueList("Urgency::Valid"):
        self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                             (urgency, cnf["Urgency::Default"]))
        urgency = cnf["Urgency::Default"]
    self.pkg.changes["urgency"] = urgency
1217 ###########################################################################
1219 # Sanity check the time stamps of files inside debs.
1220 # [Files in the near future cause ugly warnings and extreme time
1221 # travel can cause errors on extraction]
1223 def check_timestamps(self):
# NOTE(review): uses capitalised 'Cnf' here while most methods use
# 'cnf = Config()' — verify Cnf is actually in scope.
1226 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
# Anything older than Jan 1 of PastCutoffYear counts as "ancient".
1227 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1228 tar = TarTime(future_cutoff, past_cutoff)
1230 for filename, entry in self.pkg.files.items():
1231 if entry["type"] == "deb":
1234 deb_file = utils.open_file(filename)
# tar.callback inspects each member's mtime against the two cutoffs.
1235 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1238 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1239 except SystemError, e:
1240 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1241 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1244 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1248 future_files = tar.future_files.keys()
# Only the first offending file is named in the reject message.
1250 num_future_files = len(future_files)
1251 future_file = future_files[0]
1252 future_date = tar.future_files[future_file]
1253 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1254 % (filename, num_future_files, future_file, time.ctime(future_date)))
1256 ancient_files = tar.ancient_files.keys()
1258 num_ancient_files = len(ancient_files)
1259 ancient_file = ancient_files[0]
1260 ancient_date = tar.ancient_files[ancient_file]
1261 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1262 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1264 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1266 ###########################################################################
1267 def check_signed_by_key(self):
1268 """Ensure the .changes is signed by an authorized uploader."""
1269 session = DBConn().session()
1271 (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
1273 # match claimed name with actual name:
1275 # This is fundamentally broken but need us to refactor how we get
1276 # the UIDs/Fingerprints in order for us to fix it properly
# Deliberate rebinding: when no uid was found, fall back to treating
# the fingerprint as the uid and the looked-up uid as an email.
1277 uid, uid_email = self.pkg.changes["fingerprint"], uid
1278 may_nmu, may_sponsor = 1, 1
1279 # XXX by default new dds don't have a fingerprint/uid in the db atm,
1280 # and can't get one in there if we don't allow nmu/sponsorship
1281 elif is_dm is False:
1282 # If is_dm is False, we allow full upload rights
1283 uid_email = "%s@debian.org" % (uid)
1284 may_nmu, may_sponsor = 1, 1
1286 # Assume limited upload rights unless we've discovered otherwise
1288 may_nmu, may_sponsor = 0, 0
# Sponsorship detection: signer's identity vs Maintainer/Changed-By.
1290 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1292 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1294 if uid_name == "": sponsored = 1
# Record the sponsor address when the signer's key is a known alias
# and neither Maintainer nor Changed-By matches it.
1297 if ("source" in self.pkg.changes["architecture"] and
1298 uid_email and utils.is_email_alias(uid_email)):
1299 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1300 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1301 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1302 self.pkg.changes["sponsoremail"] = uid_email
1304 if sponsored and not may_sponsor:
1305 self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
# DM path: must appear in Maintainer/Uploaders of the newest version
# carrying DM-Upload-Allowed: yes.
1307 if not sponsored and not may_nmu:
1308 should_reject = True
1309 highest_sid, highest_version = None, None
1311 # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
1312 # It ignores higher versions with the dm_upload_allowed flag set to false
1313 # I'm keeping the existing behaviour for now until I've gone back and
1314 # checked exactly what the GR says - mhy
1315 for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
1316 if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
1317 highest_sid = si.source_id
1318 highest_version = si.version
1320 if highest_sid is None:
1321 self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
1323 for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
1324 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1325 if email == uid_email or name == uid_name:
1326 should_reject = False
1329 if should_reject is True:
1330 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
# DMs may not hijack binaries belonging to another source package,
# nor upload BYHAND or NEW files.
1332 for b in self.pkg.changes["binary"].keys():
1333 for suite in self.pkg.changes["distribution"].keys():
1334 q = session.query(DBSource)
1335 q = q.join(DBBinary).filter_by(package=b)
1336 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1339 if s.source != self.pkg.changes["source"]:
1340 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
1342 for f in self.pkg.files.keys():
1343 if self.pkg.files[f].has_key("byhand"):
1344 self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
1345 if self.pkg.files[f].has_key("new"):
1346 self.rejects.append("%s may not upload NEW file %s" % (uid, f))
1350 ###########################################################################
1351 def build_summaries(self):
1352 """ Build a summary of changes the upload introduces. """
# file_summary() classifies the upload's files; summary starts as the
# plain per-file listing and is extended below.
1354 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1356 short_summary = summary
1358 # This is for direport's benefit...
# Reformat the free-text Changes field: blank lines become " ." so
# downstream consumers keep paragraph boundaries.
1359 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1362 summary += "Changes: " + f
1364 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
# announce() with action=0 only returns the would-be announcement text
# (per its docstring) — no mail is sent from here.
1366 summary += self.announce(short_summary, 0)
1368 return (summary, short_summary)
1370 ###########################################################################
1372 def close_bugs(self, summary, action):
1374 Send mail to close bugs as instructed by the closes field in the changes file.
1375 Also add a line to summary if any work was done.
1377 @type summary: string
1378 @param summary: summary text, as given by L{build_summaries}
1381 @param action: Set to false no real action will be done.
1384 @return: summary. If action was taken, extended by the list of closed bugs.
1388 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1390 bugs = self.pkg.changes["closes"].keys()
1396 summary += "Closing bugs: "
1398 summary += "%s " % (bug)
# One templated mail per bug number, via the __BUG_NUMBER__ /
# __STABLE_WARNING__ substitution slots.
1400 self.Subst["__BUG_NUMBER__"] = bug
1401 if self.pkg.changes["distribution"].has_key("stable"):
1402 self.Subst["__STABLE_WARNING__"] = """
1403 Note that this package is not part of the released stable Debian
1404 distribution. It may have dependencies on other unreleased software,
1405 or other instabilities. Please take care if you wish to install it.
1406 The update will eventually make its way into the next released Debian
1409 self.Subst["__STABLE_WARNING__"] = ""
1410 mail_message = utils.TemplateSubst(self.Subst, template)
1411 utils.send_mail(mail_message)
1413 # Clear up after ourselves
# Delete the per-bug keys so stale values can't leak into later
# TemplateSubst calls on the shared Subst dict.
1414 del self.Subst["__BUG_NUMBER__"]
1415 del self.Subst["__STABLE_WARNING__"]
1417 if action and self.logger:
1418 self.logger.log(["closing bugs"] + bugs)
1424 ###########################################################################
1426 def announce(self, short_summary, action):
1428 Send an announce mail about a new upload.
1430 @type short_summary: string
1431 @param short_summary: Short summary text to include in the mail
1434 @param action: Set to false no real action will be done.
1437 @return: Textstring about action taken.
1442 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1444 # Only do announcements for source uploads with a recent dpkg-dev installed
1445 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1446 self.pkg.changes["architecture"].has_key("source"):
1452 self.Subst["__SHORT_SUMMARY__"] = short_summary
# One announcement per configured per-suite list; lists_done
# de-duplicates when several target suites share a list.
1454 for dist in self.pkg.changes["distribution"].keys():
1455 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1456 if announce_list == "" or lists_done.has_key(announce_list):
1459 lists_done[announce_list] = 1
1460 summary += "Announcing to %s\n" % (announce_list)
1463 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
# Optionally Bcc the package-tracking server for sourceful uploads.
1464 if cnf.get("Dinstall::TrackingServer") and \
1465 self.pkg.changes["architecture"].has_key("source"):
1466 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1467 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1469 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1470 utils.send_mail(mail_message)
1472 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1474 if cnf.FindB("Dinstall::CloseBugs"):
1475 summary = self.close_bugs(summary, action)
1477 del self.Subst["__SHORT_SUMMARY__"]
1481 ###########################################################################
1483 def accept (self, summary, short_summary, targetdir=None):
1487 This moves all files referenced from the .changes into the I{accepted}
1488 queue, sends the accepted mail, announces to lists, closes bugs and
1489 also checks for override disparities. If enabled it will write out
1490 the version history for the BTS Version Tracking and will finally call
1493 @type summary: string
1494 @param summary: Summary text
1496 @type short_summary: string
1497 @param short_summary: Short summary
1502 stats = SummaryStats()
1504 accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
1506 if targetdir is None:
1507 targetdir = cnf["Dir::Queue::Accepted"]
1511 self.logger.log(["Accepting changes", self.pkg.changes_file])
# Persist the serialised upload state (.dak) alongside the files.
1513 self.pkg.write_dot_dak(targetdir)
1515 # Move all the files into the accepted directory
1516 utils.move(self.pkg.changes_file, targetdir)
1518 for name, entry in sorted(self.pkg.files.items()):
1519 utils.move(name, targetdir)
1520 stats.accept_bytes += float(entry["size"])
1522 stats.accept_count += 1
1524 # Send accept mail, announce to lists, close bugs and check for
1525 # override disparities
1526 if not cnf["Dinstall::Options::No-Mail"]:
1527 self.Subst["__SUITE__"] = ""
1528 self.Subst["__SUMMARY__"] = summary
1529 mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
1530 utils.send_mail(mail_message)
1531 self.announce(short_summary, 1)
1533 ## Helper stuff for DebBugs Version Tracking
1534 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1535 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1536 # the conditionalization on dsc["bts changelog"] should be
1539 # Write out the version history from the changelog
1540 if self.pkg.changes["architecture"].has_key("source") and \
1541 self.pkg.dsc.has_key("bts changelog"):
# Written via a temp file + rename so the tracking file appears
# atomically.
1543 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1544 version_history = os.fdopen(fd, 'w')
1545 version_history.write(self.pkg.dsc["bts changelog"])
1546 version_history.close()
# changes_file[:-8] strips the ".changes" suffix.
1547 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1548 self.pkg.changes_file[:-8]+".versions")
1549 os.rename(temp_filename, filename)
1550 os.chmod(filename, 0644)
1552 # Write out the binary -> source mapping.
1553 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1554 debinfo = os.fdopen(fd, 'w')
1555 for name, entry in sorted(self.pkg.files.items()):
1556 if entry["type"] == "deb":
1557 line = " ".join([entry["package"], entry["version"],
1558 entry["architecture"], entry["source package"],
1559 entry["source version"]])
1560 debinfo.write(line+"\n")
1562 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1563 self.pkg.changes_file[:-8]+".debinfo")
1564 os.rename(temp_filename, filename)
1565 os.chmod(filename, 0644)
1567 # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
1568 # <Ganneff> we do call queue_build too
1569 # <mhy> well yes, we'd have had to if we were inserting into accepted
1570 # <Ganneff> now. thats database only.
1571 # <mhy> urgh, that's going to get messy
1572 # <Ganneff> so i make the p-n call to it *also* using accepted/
1573 # <mhy> but then the packages will be in the queue_build table without the files being there
1574 # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
1575 # <mhy> ah, good point
1576 # <Ganneff> so it will work out, as unchecked move it over
1577 # <mhy> that's all completely sick
1580 # This routine returns None on success or an error on failure
1581 res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
1586 def check_override(self):
1588 Checks override entries for validity. Mails "Override disparity" warnings,
1589 if that feature is enabled.
1591 Abandons the check if
1592 - override disparity checks are disabled
1593 - mail sending is disabled
1598 # Abandon the check if:
1599 # a) override disparity checks have been disabled
1600 # b) we're not sending mail
1601 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1602 cnf["Dinstall::Options::No-Mail"]:
# The actual comparison lives on the changes object; it returns the
# disparity text to mail (if any).
1605 summary = self.pkg.check_override()
1610 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
# Same shared-Subst pattern as close_bugs(): set, substitute, send,
# then delete the key again.
1612 self.Subst["__SUMMARY__"] = summary
1613 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1614 utils.send_mail(mail_message)
1615 del self.Subst["__SUMMARY__"]
1617 ###########################################################################
1619 def remove(self, dir=None):
1621 Used (for instance) in p-u to remove the package from unchecked
# Works from the upload's own directory, then deletes the referenced
# files and finally the .changes itself.
1624 os.chdir(self.pkg.directory)
1628 for f in self.pkg.files.keys():
1630 os.unlink(self.pkg.changes_file)
1632 ###########################################################################
1634 def move_to_dir (self, dest, perms=0660, changesperms=0664):
1636 Move files to dest with certain perms/changesperms
1638 utils.move(self.pkg.changes_file, dest, perms=changesperms)
1639 for f in self.pkg.files.keys():
1640 utils.move(f, dest, perms=perms)
1642 ###########################################################################
1644 def force_reject(self, reject_files):
1646 Forcefully move files from the current directory to the
1647 reject directory. If any file already exists in the reject
1648 directory it will be moved to the morgue to make way for
1652 @param files: file dictionary
1658 for file_entry in reject_files:
1659 # Skip any files which don't exist or which we don't have permission to copy.
1660 if os.access(file_entry, os.R_OK) == 0:
1663 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
# O_EXCL claims the destination atomically; EEXIST below means a
# previous rejection already parked a file there.
1666 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
1668 # File exists? Let's try and move it to the morgue
1669 if e.errno == errno.EEXIST:
1670 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
1672 morgue_file = utils.find_next_free(morgue_file)
1673 except NoFreeFilenameError:
1674 # Something's either gone badly Pete Tong, or
1675 # someone is trying to exploit us.
1676 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
1678 utils.move(dest_file, morgue_file, perms=0660)
# Retry the exclusive claim after clearing the old file away.
1680 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1683 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
1687 # If we got here, we own the destination file, so we can
1688 # safely overwrite it.
1689 utils.move(file_entry, dest_file, 1, perms=0660)
1692 ###########################################################################
1693 def do_reject (self, manual=0, reject_message="", note=""):
1695 Reject an upload. If called without a reject message or C{manual} is
1696 true, spawn an editor so the user can write one.
1699 @param manual: manual or automated rejection
1701 @type reject_message: string
1702 @param reject_message: A reject message
1707 # If we weren't given a manual rejection message, spawn an
1708 # editor so the user can add one in...
1709 if manual and not reject_message:
1710 (fd, temp_filename) = utils.temp_filename()
1711 temp_file = os.fdopen(fd, 'w')
1714 temp_file.write(line)
1716 editor = os.environ.get("EDITOR","vi")
# Edit/confirm loop: keep reopening the editor while the operator
# answers 'E'.
1718 while answer == 'E':
1719 os.system("%s %s" % (editor, temp_filename))
1720 temp_fh = utils.open_file(temp_filename)
1721 reject_message = "".join(temp_fh.readlines())
1723 print "Reject message:"
1724 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
1725 prompt = "[R]eject, Edit, Abandon, Quit ?"
1727 while prompt.find(answer) == -1:
1728 answer = utils.our_raw_input(prompt)
1729 m = re_default_answer.search(prompt)
1732 answer = answer[:1].upper()
1733 os.unlink(temp_filename)
1739 print "Rejecting.\n"
# The human-readable reason is stored next to the rejected files as
# <changes basename>.reason ([:-8] strips ".changes").
1743 reason_filename = self.pkg.changes_file[:-8] + ".reason"
1744 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
1746 # Move all the files into the reject directory
1747 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
1748 self.force_reject(reject_files)
1750 # If we fail here someone is probably trying to exploit the race
1751 # so let's just raise an exception ...
1752 if os.path.exists(reason_filename):
1753 os.unlink(reason_filename)
1754 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1756 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
# Automated path: canned rejector address, raw message into .reason.
1759 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
1760 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
1761 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
1762 os.write(reason_fd, reject_message)
1763 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1765 # Build up the rejection email
# Manual path: the operator's identity and message go into the mail,
# and the full mail is what lands in the .reason file.
1766 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
1767 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
1768 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
# NOTE(review): capitalised 'Cnf' here vs 'cnf' two lines up — verify
# Cnf is in scope / unify the spelling.
1769 self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
1770 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1771 # Write the rejection email out as the <foo>.reason file
1772 os.write(reason_fd, reject_mail_message)
1774 del self.Subst["__REJECTOR_ADDRESS__"]
1775 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
1776 del self.Subst["__CC__"]
1780 # Send the rejection mail if appropriate
1781 if not cnf["Dinstall::Options::No-Mail"]:
1782 utils.send_mail(reject_mail_message)
1785 self.logger.log(["rejected", self.pkg.changes_file])
1789 ################################################################################
1790 def in_override_p(self, package, component, suite, binary_type, file, session):
1792 Check if a package already has override entries in the DB
1794 @type package: string
1795 @param package: package name
1797 @type component: string
1798 @param component: database id of the component
1801 @param suite: database id of the suite
1803 @type binary_type: string
1804 @param binary_type: type of the package
1807 @param file: filename we check
1809 @return: the database result. But noone cares anyway.
1815 if binary_type == "": # must be source
1818 file_type = binary_type
1820 # Override suite name; used for example with proposed-updates
1821 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
1822 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
1824 result = get_override(package, suite, component, file_type, session)
1826 # If checking for a source package fall back on the binary override type
1827 if file_type == "dsc" and len(result) < 1:
1828 result = get_override(package, suite, component, ['deb', 'udeb'], session)
1830 # Remember the section and priority so we can check them later if appropriate
# Stashed on the file entry for check_override()'s disparity report.
1833 self.pkg.files[file]["override section"] = result.section.section
1834 self.pkg.files[file]["override priority"] = result.priority.priority
1839 ################################################################################
def get_anyversion(self, sv_list, suite):
    """
    Return the highest version present in C{suite} or in any suite it
    is configured to enhance (Suite::<suite>::VersionChecks::Enhances).

    @type sv_list: list
    @param sv_list: list of (suite, version) tuples to check

    @type suite: string
    @param suite: suite name

    @return: the highest matching version string, or None if none of
             the considered suites appear in C{sv_list}
    """
    # BUGFIX/CONSISTENCY: this method used the bare capitalised global
    # 'Cnf', which the rest of this class does not define — every other
    # method obtains the configuration via Config() (a singleton, so
    # behaviour is unchanged for a correctly configured dak).
    cnf = Config()

    anyversion = None
    # The target suite plus everything it enhances, compared
    # case-insensitively against the suite names in sv_list.
    anysuite = [suite] + cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
    for (s, v) in sv_list:
        if s in [ x.lower() for x in anysuite ]:
            if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                anyversion = v

    return anyversion
1860 ################################################################################
def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
    """
    Ensure versions are newer than existing packages in target
    suites and that cross-suite version checking rules as
    set out in the conf file are satisfied.

    @type sv_list: list
    @param sv_list: list of (suite, version) tuples to check

    @type file: string
    @param file: filename the messages refer to

    @type new_version: string
    @param new_version: version of the incoming package

    @type sourceful: bool
    @param sourceful: whether this is a sourceful upload (the
                      MustBeNewerThan check only applies to source)

    Appends to self.rejects / self.warnings; returns nothing.
    """
    cnf = Config()

    # Check versions for each target suite
    for target_suite in self.pkg.changes["distribution"].keys():
        must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
        must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

        # Enforce "must be newer than target suite" even if conffile omits it
        if target_suite not in must_be_newer_than:
            must_be_newer_than.append(target_suite)

        for (suite, existent_version) in sv_list:
            vercmp = apt_pkg.VersionCompare(new_version, existent_version)

            if suite in must_be_newer_than and sourceful and vercmp < 1:
                self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))

            if suite in must_be_older_than and vercmp > -1:
                cansave = 0

                if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                    # we really use the other suite, ignoring the conflicting one ...
                    addsuite = self.pkg.changes["distribution-version"][suite]

                    add_version = self.get_anyversion(sv_list, addsuite)
                    target_version = self.get_anyversion(sv_list, target_suite)

                    if not add_version:
                        # not add_version can only happen if we map to a suite
                        # that doesn't enhance the suite we're propup'ing from.
                        # so "propup-ver x a b c; map a d" is a problem only if
                        # d doesn't enhance a.
                        #
                        # i think we could always propagate in this case, rather
                        # than complaining. either way, this isn't a REJECT issue
                        #
                        # And - we really should complain to the dorks who configured dak
                        self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                        cansave = 1
                    elif not target_version:
                        # not targets_version is true when the package is NEW
                        # we could just stick with the "...old version..." REJECT
                        # for this, I think.
                        self.rejects.append("Won't propogate NEW packages.")
                    elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                        # propogation would be redundant. no need to reject though.
                        self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                        cansave = 1
                    elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                         apt_pkg.VersionCompare(add_version, target_version) >= 0:
                        # propogate!
                        self.warnings.append("Propogating upload to %s" % (addsuite))
                        self.pkg.changes.setdefault("propdistribution", {})
                        self.pkg.changes["propdistribution"][addsuite] = 1
                        cansave = 1

                if not cansave:
                    # BUGFIX: this was 'self.reject.append(...)' — there is
                    # no 'reject' attribute (the list is 'self.rejects',
                    # used everywhere else in this class), so hitting this
                    # path raised AttributeError instead of rejecting.
                    self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1939 ################################################################################
1940 def check_binary_against_db(self, file, session):
1941 # Ensure version is sane
1942 q = session.query(BinAssociation)
1943 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
1944 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
1946 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
1947 file, self.pkg.files[file]["version"], sourceful=False)
1949 # Check for any existing copies of the file
1950 q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
1951 q = q.filter_by(version=self.pkg.files[file]["version"])
1952 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])
1955 self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
1957 ################################################################################
1959 def check_source_against_db(self, file, session):
1962 source = self.pkg.dsc.get("source")
1963 version = self.pkg.dsc.get("version")
1965 # Ensure version is sane
1966 q = session.query(SrcAssociation)
1967 q = q.join(DBSource).filter(DBSource.source==source)
1969 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
1970 file, version, sourceful=True)
1972 ################################################################################
1973 def check_dsc_against_db(self, file, session):
1976 @warning: NB: this function can remove entries from the 'files' index [if
1977 the .orig.tar.gz is a duplicate of the one in the archive]; if
1978 you're iterating over 'files' and call this function as part of
1979 the loop, be sure to add a check to the top of the loop to
1980 ensure you haven't just tried to dereference the deleted entry.
1985 self.pkg.orig_tar_gz = None
1987 # Try and find all files mentioned in the .dsc. This has
1988 # to work harder to cope with the multiple possible
1989 # locations of an .orig.tar.gz.
1990 # The ordering on the select is needed to pick the newest orig
1991 # when it exists in multiple places.
1992 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
1994 if self.pkg.files.has_key(dsc_name):
1995 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
1996 actual_size = int(self.pkg.files[dsc_name]["size"])
1997 found = "%s in incoming" % (dsc_name)
1999 # Check the file does not already exist in the archive
2000 ql = get_poolfile_like_name(dsc_name, session)
2002 # Strip out anything that isn't '%s' or '/%s$'
2004 if not i.filename.endswith(dsc_name):
2007 # "[dak] has not broken them. [dak] has fixed a
2008 # brokenness. Your crappy hack exploited a bug in
2011 # "(Come on! I thought it was always obvious that
2012 # one just doesn't release different files with
2013 # the same name and version.)"
2014 # -- ajk@ on d-devel@l.d.o
2017 # Ignore exact matches for .orig.tar.gz
2019 if dsc_name.endswith(".orig.tar.gz"):
2021 if self.pkg.files.has_key(dsc_name) and \
2022 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2023 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2024 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2025 # TODO: Don't delete the entry, just mark it as not needed
2026 # This would fix the stupidity of changing something we often iterate over
2027 # whilst we're doing it
2028 del self.pkg.files[dsc_name]
2029 self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
2033 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2035 elif dsc_name.endswith(".orig.tar.gz"):
2037 ql = get_poolfile_like_name(dsc_name, session)
2039 # Strip out anything that isn't '%s' or '/%s$'
2040 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2042 if not i.filename.endswith(dsc_name):
2046 # Unfortunately, we may get more than one match here if,
2047 # for example, the package was in potato but had an -sa
2048 # upload in woody. So we need to choose the right one.
2050 # default to something sane in case we don't match any or have only one
2055 old_file = os.path.join(i.location.path, i.filename)
2056 old_file_fh = utils.open_file(old_file)
2057 actual_md5 = apt_pkg.md5sum(old_file_fh)
2059 actual_size = os.stat(old_file)[stat.ST_SIZE]
2060 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2063 old_file = os.path.join(i.location.path, i.filename)
2064 old_file_fh = utils.open_file(old_file)
2065 actual_md5 = apt_pkg.md5sum(old_file_fh)
2067 actual_size = os.stat(old_file)[stat.ST_SIZE]
2069 suite_type = x.location.archive_type
2070 # need this for updating dsc_files in install()
2071 dsc_entry["files id"] = x.file_id
2072 # See install() in process-accepted...
2073 self.pkg.orig_tar_id = x.file_id
2074 self.pkg.orig_tar_gz = old_file
2075 self.pkg.orig_tar_location = x.location.location_id
2077 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2078 # Not there? Check the queue directories...
2079 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2080 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2082 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2083 if os.path.exists(in_otherdir):
2084 in_otherdir_fh = utils.open_file(in_otherdir)
2085 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2086 in_otherdir_fh.close()
2087 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2089 self.pkg.orig_tar_gz = in_otherdir
2092 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
2093 self.pkg.orig_tar_gz = -1
2096 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
2098 if actual_md5 != dsc_entry["md5sum"]:
2099 self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
2100 if actual_size != int(dsc_entry["size"]):
2101 self.rejects.append("size for %s doesn't match %s." % (found, file))
2103 ################################################################################
2104 def accepted_checks(self, overwrite_checks, session):
2105 # Recheck anything that relies on the database; since that's not
2106 # frozen between accept and our run time when called from p-a.
2108 # overwrite_checks is set to False when installing to stable/oldstable
2113 # Find the .dsc (again)
2115 for f in self.pkg.files.keys():
2116 if self.pkg.files[f]["type"] == "dsc":
2119 for checkfile in self.pkg.files.keys():
2120 # The .orig.tar.gz can disappear out from under us is it's a
2121 # duplicate of one in the archive.
2122 if not self.pkg.files.has_key(checkfile):
2125 entry = self.pkg.files[checkfile]
2127 # Check that the source still exists
2128 if entry["type"] == "deb":
2129 source_version = entry["source version"]
2130 source_package = entry["source package"]
2131 if not self.pkg.changes["architecture"].has_key("source") \
2132 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2133 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2135 # Version and file overwrite checks
2136 if overwrite_checks:
2137 if entry["type"] == "deb":
2138 self.check_binary_against_db(checkfile, session)
2139 elif entry["type"] == "dsc":
2140 self.check_source_against_db(checkfile, session)
2141 self.check_dsc_against_db(dsc_filename, session)
2143 # propogate in the case it is in the override tables:
2144 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2145 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2146 propogate[suite] = 1
2148 nopropogate[suite] = 1
2150 for suite in propogate.keys():
2151 if suite in nopropogate:
2153 self.pkg.changes["distribution"][suite] = 1
2155 for checkfile in self.pkg.files.keys():
2156 # Check the package is still in the override tables
2157 for suite in self.pkg.changes["distribution"].keys():
2158 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2159 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2161 ################################################################################
2162 # This is not really a reject, but an unaccept, but since a) the code for
2163 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2164 # extremely rare, for now we'll go with whining at our admin folks...
2166 def do_unaccept(self):
2169 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2170 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2171 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2172 self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
2173 if cnf.has_key("Dinstall::Bcc"):
2174 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2176 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2178 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2180 # Write the rejection email out as the <foo>.reason file
2181 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2182 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2184 # If we fail here someone is probably trying to exploit the race
2185 # so let's just raise an exception ...
2186 if os.path.exists(reject_filename):
2187 os.unlink(reject_filename)
2189 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2190 os.write(fd, reject_mail_message)
2193 utils.send_mail(reject_mail_message)
2195 del self.Subst["__REJECTOR_ADDRESS__"]
2196 del self.Subst["__REJECT_MESSAGE__"]
2197 del self.Subst["__CC__"]
2199 ################################################################################
2200 # If any file of an upload has a recent mtime then chances are good
2201 # the file is still being uploaded.
2203 def upload_too_new(self):
2206 # Move back to the original directory to get accurate time stamps
2208 os.chdir(self.pkg.directory)
2209 file_list = self.pkg.files.keys()
2210 file_list.extend(self.pkg.dsc_files.keys())
2211 file_list.append(self.pkg.changes_file)
2214 last_modified = time.time()-os.path.getmtime(f)
2215 if last_modified < int(cnf["Dinstall::SkipTime"]):