5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
43 from dak_exceptions import *
46 from config import Config
47 from holding import Holding
49 from summarystats import SummaryStats
50 from utils import parse_changes
51 from textutils import fix_maintainer
52 from binary import Binary
54 ###############################################################################
def get_type(f, session=None):
    """
    Get the file type of C{f}.

    @type f: dict
    @param f: file entry from a Changes object

    @type session: SQLAlchemy Session
    @param session: optional session; a new one is opened if None

    @rtype: string
    @return: the file type ("dsc", "deb", "udeb", ...); calls utils.fubar()
             (which exits) on an unrecognised or invalid type
    """
    if session is None:
        session = DBConn().session()

    # Prefer the type recorded by the binary checks; otherwise derive it
    # from the filename-based type for source artefacts.
    if f.has_key("dbtype"):
        # Fixed: read from the parameter 'f'; the original indexed the
        # builtin 'file', which would raise a TypeError at runtime.
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = "dsc"
    else:
        # Fixed: 'file_type' was unbound on this branch; report the raw type.
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))

    return file_type
85 ################################################################################
87 # Determine what parts in a .changes are NEW
def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @param warn: Warn if overrides are added for (old)stable

    @return: dictionary of NEW components.

    NOTE(review): several source lines are elided in this excerpt (the
    initialisation of 'new', the assignment of 'pkg', a few control-flow
    keywords, and the trailing return); the comments below mark those
    spots — do not treat this excerpt as runnable as-is.
    """
    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            # (elided: presumably a 'continue')
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f)
        component = f["component"]

        if file_type == "dsc":
            # (elided: presumably forces priority to "source" for .dsc files)

        # (elided: 'pkg = f["package"]' presumably assigned before this point)
        if not new.has_key(pkg):
            # (elided: presumably 'new[pkg] = {}')
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        # (elided: 'else:' — package already seen, reconcile its type)
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Anything that already has an override in some target suite is not NEW.
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            # (elided: a guard that 'ql' returned something, presumably
            #  followed by removal of 'pkg' from 'new')
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]

    # (elided: an 'if warn:' guard presumably wraps the warnings below)
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    # (elided: presumably 'session.close()' and 'return new')
165 ################################################################################
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    Invalid entries are marked in place by setting "section id" /
    "priority id" to -1; nothing is returned.

    NOTE(review): the 'is None' guards around the section/priority lookups
    are elided in this excerpt; comments mark the inferred structure.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        # (elided: presumably 'if section is None:')
            new[pkg]["section id"] = -1
        # (elided: 'else:')
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        # (elided: presumably 'if priority is None:')
            new[pkg]["priority id"] = -1
        # (elided: 'else:')
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks: is this a debian-installer section?
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # NOTE(review): 'priority' is an ORM object here, not a string —
        # this comparison presumably relies on the Priority class defining
        # __eq__ against strings; confirm against the dbconn model.
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
209 ###############################################################################
def lookup_uid_from_fingerprint(fpr, session):
    """
    Look up the account belonging to the key fingerprint C{fpr}.

    @return: a tuple (uid, uid_name, is_dm) — the account id, its
        human-readable name, and whether the key sits on a Debian
        Maintainer keyring.

    NOTE(review): this excerpt elides several lines, including the default
    assignments of uid/uid_name/is_dm and the branches handling an unknown
    fingerprint; the structure below is partly inferred.
    """
    # This is a stupid default, but see the comments below
    # (elided: the default assignments themselves)
    user = get_uid_from_fingerprint(fpr, session)

    # (elided: presumably a 'user is not None' guard)
        if user.name is None:
            # (elided: fallback value for uid_name)

        # Check the relevant fingerprint (which we have to have)
        for f in user.fingerprint:
            if f.fingerprint == fpr:
                is_dm = f.keyring.debian_maintainer
                # (elided: presumably a 'break')

    return (uid, uid_name, is_dm)
234 ###############################################################################
# Used by Upload.check_timestamps
class TarTime(object):
    """
    Tar-walk callback object that records member mtimes falling outside
    the acceptable [past_cutoff, future_cutoff] window.

    After a walk, C{future_files} maps member names to mtimes newer than
    C{future_cutoff}, and C{ancient_files} maps member names to mtimes
    older than C{past_cutoff}.
    """
    def __init__(self, future_cutoff, past_cutoff):
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff
        # Initialise the result dicts via reset() so the object starts
        # clean and can be reused between tar walks.
        self.reset()

    def reset(self):
        """Clear the collected file lists before (re)scanning a tarball."""
        # (the 'def reset' header was elided in the reviewed excerpt,
        # leaving these initialisations orphaned; restored here so
        # callback() never touches uninitialised attributes)
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        """Record Name in future_files/ancient_files if MTime is out of range."""
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
253 ###############################################################################
255 class Upload(object):
257 Everything that has to do with an upload processed.
265 ###########################################################################
268 """ Reset a number of internal variables."""
270 # Initialize the substitution template map
273 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
274 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
275 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
276 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
    def package_info(self):
        """
        Build a human-readable summary of this upload's accumulated reject
        reasons, warnings and notes (used for the __REJECT_MESSAGE__
        template substitution).

        NOTE(review): the initialisation of 'msg', the header lines for the
        warnings/notes sections and the final 'return msg' are elided in
        this excerpt.
        """
        if len(self.rejects) > 0:
            msg += "Reject Reasons:\n"
            msg += "\n".join(self.rejects)

        if len(self.warnings) > 0:
            # (elided: a section-header line for warnings)
            msg += "\n".join(self.warnings)

        if len(self.notes) > 0:
            # (elided: a section-header line for notes)
            msg += "\n".join(self.notes)
301 ###########################################################################
302 def update_subst(self):
303 """ Set up the per-package template substitution mappings """
307 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
308 if not self.pkg.changes.has_key("architecture") or not \
309 isinstance(changes["architecture"], DictType):
310 self.pkg.changes["architecture"] = { "Unknown" : "" }
312 # and maintainer2047 may not exist.
313 if not self.pkg.changes.has_key("maintainer2047"):
314 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
316 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
317 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
318 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
320 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
321 if self.pkg.changes["architecture"].has_key("source") and \
322 self.pkg.changes["changedby822"] != "" and \
323 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
325 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
326 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], changes["maintainer2047"])
327 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
329 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
330 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
331 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
333 if "sponsoremail" in self.pkg.changes:
334 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
336 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
337 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
339 # Apply any global override of the Maintainer field
340 if cnf.get("Dinstall::OverrideMaintainer"):
341 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
342 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
344 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
345 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
346 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
348 ###########################################################################
    def load_changes(self, filename):
        """
        Load and parse a .changes file, populating self.pkg.* and
        appending any problems found to self.rejects.

        @param filename: path of the .changes file to load

        @rvalue: whether the changes file was valid or not.  We may want to
        reject even if this is True (see what gets put in self.rejects).
        This is simply to prevent us even trying things later which will
        fail because we couldn't properly parse the file.

        NOTE(review): this excerpt elides a number of lines ('try:'
        openers, 'return False' statements, blanks); comments mark those
        spots — do not treat the excerpt as runnable as-is.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        # (elided: 'try:' opener)
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            # (elided: presumably 'return False')
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            # (elided: presumably 'return False')
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            # (elided: presumably 'return False')

        # Parse the Files field from the .changes into another dictionary
        # (elided: 'try:' opener)
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            # (elided: presumably 'return False')

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                # (elided: presumably 'return False')

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            # (elided: presumably a guard that 'o' is non-empty)
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                # (elided: presumably 'for j in o.split():')
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        # (elided: 'try:' opener)
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            # NOTE(review): 'changes' is not defined in this scope — this
            # presumably should be self.pkg.changes["maintainer"]; as
            # written it raises a NameError while building the message.
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        # (elided: 'try:' opener)
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""
            # NOTE(review): same NameError hazard as above — 'changes'
            # presumably should be self.pkg.changes.
            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check there isn't already a changes file of the same name in one
        # of the queue directories.
        base_filename = os.path.basename(filename)
        for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
            # NOTE(review): 'Cnf' (capitalised) is inconsistent with the
            # lowercase 'cnf' used elsewhere in this class — confirm which
            # name is actually in scope here.
            if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
                self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
            # (elided: presumably 'return False')

        # Changes was syntactically valid even if we'll reject
        # (elided: presumably 'return True')
462 ###########################################################################
    def check_distributions(self):
        "Check and map the Distribution field"

        # (elided: the configuration object setup preceding this loop)

        # Handle suite mappings: each entry of SuiteMappings is a
        # whitespace-separated directive whose first word selects the
        # mapping type handled by the if/elif chain below.
        for m in Cnf.ValueList("SuiteMappings"):
            # (elided: the split of 'm' into 'args' and 'mtype')
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        # NOTE(review): 'arch_string' is a bare name in this
                        # comprehension — presumably it should be
                        # 'a.arch_string'; as written it raises a NameError.
                        if arch not in [ arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            # (elided: presumably a 'break')
            elif mtype == "ignore":
                # (elided: presumably 'suite = args[1]')
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                # (elided: presumably 'suite = args[1]')
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
519 ###########################################################################
    def binary_file_checks(self, f, session):
        """
        Sanity-check a binary (.deb/.udeb) file C{f} from this upload.

        Parses the package's control file, enforces mandatory fields,
        checks that package name / version / architecture agree with the
        filename and with the .changes, and verifies that a matching
        source package exists.  Problems are appended to self.rejects /
        self.warnings.

        NOTE(review): several lines are elided in this excerpt ('try:'
        openers, guards, 'return' statements); comments mark those spots.
        """
        # (elided: configuration setup, presumably 'cnf = Config()')
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        # (elided: 'try:' opener)
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        # (elided: the matching 'except:' clause)
            # NOTE(review): sys.exc_type is long-deprecated Python 2 API.
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            # Can't continue, none of the checks on control would work.
            # (elided: presumably deb_file.close() and 'return')

        # Check for mandatory "Description:"
        # (elided: a rewind of deb_file and a 'try:' opener)
            # The trailing + '\n' forces a KeyError if Description is
            # absent; the computed string itself is discarded.
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        # (elided: the matching 'except KeyError:' clause)
            self.rejects.append("%s: Missing Description in binary package" % (f))
            # (elided: presumably deb_file.close() and 'return')

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                # (elided: presumably 'return')

        # Ensure the package name matches the one give in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        # (elided: a guard that Depends is present-but-empty)
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        # (elided: a guard that Provides is present)
            provide = re_spacestrip.sub('', provides)
            # (elided: a guard that the stripped Provides is empty)
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        # Record the binary type from the file extension.
        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        # (elided: 'else:')
            self.rejects.append("%s is neither a .deb or a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        # (elided: presumably a default 'source_version = ""')

        # A "pkg (version)" Source field carries an explicit source version.
        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            # (elided: presumably 'source = m.group(1)')
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        # package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        # version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        # architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        # (elided: 'else:' — the binary-only upload path)
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    # (elided: presumably marks the entry byhand)
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    # (elided: presumably marks the entry new)
                # (elided: 'else:' — search the remaining queue directories)
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                # (elided: presumably a 'break')

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # (elided: construction and scan of a Binary object 'b')
        if len(b.rejects) > 0:
            # (elided: 'for j in b.rejects:' loop header)
            self.rejects.append(j)
    def source_file_checks(self, f, session):
        """
        Sanity-check a source file C{f} (.dsc, .diff.gz or tarball) from
        this upload: filename parsing, name/version agreement with the
        .changes, and (for a .dsc) its GPG signature.  Problems are
        appended to self.rejects.

        NOTE(review): a few guard lines are elided in this excerpt.
        """
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        # (elided: presumably an 'if not m:' reject-and-return)

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        # (for an orig tarball only the upstream part of the version applies)
        if entry["type"] == "orig.tar.gz":
            changes_version = self.pkg.changes["chopversion2"]
        # (elided: 'else:')
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            # (elided: presumably 'for j in rejects:' loop header)
            self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        """
        Check file C{f} against the target C{suite}: component mapping and
        validity, NEW detection, priority sanity, pool location, and
        duplicate-pool-file detection.  Results are recorded on the file's
        entry and in self.rejects.

        NOTE(review): several guard lines are elided in this excerpt.
        """
        # (elided: configuration setup, presumably 'cnf = Config()')
        entry = self.pkg.files[f]
        archive = utils.where_am_i()

        # Skip byhand files — they get no per-suite treatment.
        if entry.has_key("byhand"):
            # (elided: presumably 'return')

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        # NOTE(review): "Suite:%s::Components" is missing a colon — every
        # other lookup in this file uses "Suite::%s::Components" (see the
        # ValueList call on the next line), so this has_key() presumably
        # never matches and the check is silently skipped; confirm.
        if cnf.has_key("Suite:%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            # (elided: presumably 'return')

        # Validate the component
        component = entry["component"]
        if not get_component(component, session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, component))
            # (elided: presumably 'return')

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            # (elided: presumably marks the entry as new)

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], archive, session)
        # (elided: presumably 'if l is None:')
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
            entry["location id"] = -1
        # (elided: 'else:')
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        # (elided: presumably 'if found is None:')
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        # (elided: the branch recording no existing pool file)
            entry["files id"] = None
        # (elided: the branch recording the matching pool file)
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        # (elided: presumably a guard that the query returned rows)
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        """
        Check every file listed in the upload: copy them into the holding
        area (when C{action} is True), then run per-file, per-type and
        per-suite checks, appending problems to self.rejects.

        @param action: if True, actually copy files to the holding area

        NOTE(review): numerous lines are elided in this excerpt (loop
        headers, guards, 'cnf'/'holding' setup); comments mark the spots.
        """
        archive = utils.where_am_i()
        file_keys = self.pkg.files.keys()
        # (elided: presumably 'holding = Holding()' and 'cnf = Config()')

        # XXX: As far as I can tell, this can no longer happen - see
        # comments by AJ in old revisions - mhy
        # if reprocess is 2 we've already done this and we're checking
        # things again for the new .orig.tar.gz.
        # [Yes, I'm fully aware of how disgusting this is]
        if action and self.reprocess < 2:
            # (elided: presumably saving the current working directory)
            os.chdir(self.pkg.directory)
            # (elided: presumably 'for f in file_keys:' loop header)
            ret = holding.copy_to_holding(f)
            # (elided: presumably a guard that copy_to_holding failed)
                # XXX: Should we bail out here or try and continue?
                self.rejects.append(ret)
            # (elided: presumably restoring the working directory)

        # Check there isn't already a .changes or .dak file of the same name in
        # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)
        # [:-8] strips the trailing ".changes" (8 characters).
        dot_dak_filename = base_filename[:-8] + ".dak"

        for suite in self.pkg.changes["distribution"].keys():
            copychanges = "Suite::%s::CopyChanges" % (suite)
            if cnf.has_key(copychanges) and \
               os.path.exists(os.path.join(cnf[copychanges], base_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (base_filename, cnf[copychanges]))

            copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
            if cnf.has_key(copy_dot_dak) and \
               os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
                # NOTE(review): 'Cnf' (capitalised) is inconsistent with the
                # 'cnf' used three lines up — confirm which is in scope.
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (dot_dak_filename, Cnf[copy_dot_dak]))

        # (elided: presumably flag initialisation, e.g. 'has_binaries')

        session = DBConn().session()

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                # (elided: presumably an 'if action:' guard)
                if os.path.exists(f):
                    self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                # (elided: 'else:')
                    self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"
                # (elided: presumably a 'continue')

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                # (elided: presumably 'has_binaries = True')
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                # (elided: presumably 'has_source = True')

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package? Assume byhand...
            # (elided: 'else:')
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            # (elided: presumably an 'if not has_source:' guard)
            self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
906 ###########################################################################
907 def check_dsc(self, action=True):
908 """Returns bool indicating whether or not the source changes are valid"""
909 # Ensure there is source to check
910 if not self.pkg.changes["architecture"].has_key("source"):
915 for f, entry in self.pkg.files.items():
916 if entry["type"] == "dsc":
918 self.rejects.append("can not process a .changes file with multiple .dsc's.")
923 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
925 self.rejects.append("source uploads must contain a dsc file")
928 # Parse the .dsc file
930 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
931 except CantOpenError:
932 # if not -n copy_to_holding() will have done this for us...
934 self.rejects.append("%s: can't read file." % (dsc_filename))
935 except ParseChangesError, line:
936 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
937 except InvalidDscError, line:
938 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
939 except ChangesUnicodeError:
940 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
942 # Build up the file list of files mentioned by the .dsc
944 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
945 except NoFilesFieldError:
946 self.rejects.append("%s: no Files: field." % (dsc_filename))
948 except UnknownFormatError, format:
949 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
951 except ParseChangesError, line:
952 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
955 # Enforce mandatory fields
956 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
957 if not self.pkg.dsc.has_key(i):
958 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
961 # Validate the source and version fields
962 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
963 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
964 if not re_valid_version.match(self.pkg.dsc["version"]):
965 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
967 # Bumping the version number of the .dsc breaks extraction by stable's
968 # dpkg-source. So let's not do that...
969 if self.pkg.dsc["format"] != "1.0":
970 self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
972 # Validate the Maintainer field
974 # We ignore the return value
975 fix_maintainer(self.pkg.dsc["maintainer"])
976 except ParseMaintError, msg:
977 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
978 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
980 # Validate the build-depends field(s)
981 for field_name in [ "build-depends", "build-depends-indep" ]:
982 field = self.pkg.dsc.get(field_name)
984 # Check for broken dpkg-dev lossage...
985 if field.startswith("ARRAY"):
986 self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
987 (dsc_filename, field_name.title()))
989 # Have apt try to parse them...
991 apt_pkg.ParseSrcDepends(field)
993 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
995 # Ensure the version number in the .dsc matches the version number in the .changes
996 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
997 changes_version = self.pkg.files[dsc_filename]["version"]
999 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1000 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1002 # Ensure there is a .tar.gz in the .dsc file
1004 for f in self.pkg.dsc_files.keys():
1005 m = re_issource.match(f)
1007 self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
1010 if ftype == "orig.tar.gz" or ftype == "tar.gz":
1014 self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
1016 # Ensure source is newer than existing source in target suites
1017 self.check_source_against_db(dsc_filename)
1019 self.check_dsc_against_db(dsc_filename)
1023 ###########################################################################
1025 def get_changelog_versions(self, source_dir):
1026 """Extracts a the source package and (optionally) grabs the
1027 version history out of debian/changelog for the BTS."""
1031 # Find the .dsc (again)
1033 for f in self.pkg.files.keys():
1034 if self.pkg.files[f]["type"] == "dsc":
1037 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1038 if not dsc_filename:
1041 # Create a symlink mirror of the source files in our temporary directory
1042 for f in self.pkg.files.keys():
1043 m = re_issource.match(f)
1045 src = os.path.join(source_dir, f)
1046 # If a file is missing for whatever reason, give up.
1047 if not os.path.exists(src):
1050 if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
1052 dest = os.path.join(os.getcwd(), f)
1053 os.symlink(src, dest)
1055 # If the orig.tar.gz is not a part of the upload, create a symlink to the
1057 if self.pkg.orig_tar_gz:
1058 dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
1059 os.symlink(self.pkg.orig_tar_gz, dest)
1061 # Extract the source
1062 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1063 (result, output) = commands.getstatusoutput(cmd)
1065 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1066 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
1069 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1072 # Get the upstream version
1073 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1074 if re_strip_revision.search(upstr_version):
1075 upstr_version = re_strip_revision.sub('', upstr_version)
1077 # Ensure the changelog file exists
1078 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1079 if not os.path.exists(changelog_filename):
1080 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1083 # Parse the changelog
1084 self.pkg.dsc["bts changelog"] = ""
1085 changelog_file = utils.open_file(changelog_filename)
1086 for line in changelog_file.readlines():
1087 m = re_changelog_versions.match(line)
1089 self.pkg.dsc["bts changelog"] += line
1090 changelog_file.close()
1092 # Check we found at least one revision in the changelog
1093 if not self.pkg.dsc["bts changelog"]:
1094 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1096 def check_source(self):
1097 # XXX: I'm fairly sure reprocess == 2 can never happen
1098 # AJT disabled the is_incoming check years ago - mhy
1099 # We should probably scrap or rethink the whole reprocess thing
1101 # a) there's no source
1102 # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
1103 # or c) the orig.tar.gz is MIA
1104 if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
1105 or self.pkg.orig_tar_gz == -1:
1108 tmpdir = utils.temp_dirname()
1110 # Move into the temporary directory
1114 # Get the changelog version history
1115 self.get_changelog_versions(cwd)
1117 # Move back and cleanup the temporary tree
1121 shutil.rmtree(tmpdir)
1123 if e.errno != errno.EACCES:
1125 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1127 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1128 # We probably have u-r or u-w directories so chmod everything
1130 cmd = "chmod -R u+rwx %s" % (tmpdir)
1131 result = os.system(cmd)
1133 utils.fubar("'%s' failed with result %s." % (cmd, result))
1134 shutil.rmtree(tmpdir)
1135 except Exception, e:
1136 print "foobar2 (%s)" % e
1137 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1139 ###########################################################################
1140 def ensure_hashes(self):
1141 # Make sure we recognise the format of the Files: field in the .changes
1142 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1143 if len(format) == 2:
1144 format = int(format[0]), int(format[1])
1146 format = int(float(format[0])), 0
1148 # We need to deal with the original changes blob, as the fields we need
1149 # might not be in the changes dict serialised into the .dak anymore.
1150 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1152 # Copy the checksums over to the current changes dict. This will keep
1153 # the existing modifications to it intact.
1154 for field in orig_changes:
1155 if field.startswith('checksums-'):
1156 self.pkg.changes[field] = orig_changes[field]
1158 # Check for unsupported hashes
1159 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1160 self.rejects.append(j)
1162 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1163 self.rejects.append(j)
1165 # We have to calculate the hash if we have an earlier changes version than
1166 # the hash appears in rather than require it exist in the changes file
1167 for hashname, hashfunc, version in utils.known_hashes:
1168 # TODO: Move _ensure_changes_hash into this class
1169 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1170 self.rejects.append(j)
1171 if "source" in self.pkg.changes["architecture"]:
1172 # TODO: Move _ensure_dsc_hash into this class
1173 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1174 self.rejects.append(j)
1176 def check_hashes(self):
1177 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1178 self.rejects.append(m)
1180 for m in utils.check_size(".changes", self.pkg.files):
1181 self.rejects.append(m)
1183 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1184 self.rejects.append(m)
1186 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1187 self.rejects.append(m)
1189 self.ensure_hashes()
1191 ###########################################################################
1192 def check_urgency(self):
1194 if self.pkg.changes["architecture"].has_key("source"):
1195 if not self.pkg.changes.has_key("urgency"):
1196 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1197 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1198 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1199 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1200 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1201 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1203 ###########################################################################
1205 # Sanity check the time stamps of files inside debs.
1206 # [Files in the near future cause ugly warnings and extreme time
1207 # travel can cause errors on extraction]
    def check_timestamps(self):
        # Reject any .deb whose members carry timestamps beyond the
        # configured future grace period or before the configured past
        # cutoff year; out-of-range members break extraction or produce
        # ugly warnings for users.
        # NOTE(review): 'Cnf' (capitalised) is presumably a module-level
        # Config object; other methods in this class bind a local 'cnf'.
        # Verify against the full source.
        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
        # TarTime collects out-of-range member timestamps via its callback
        # (tar.future_files / tar.ancient_files are read below).
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                    deb_file = utils.open_file(filename)
                    # Scan the control member first, then the data member.
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                        apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

                    future_files = tar.future_files.keys()
                        # Report the count plus one example member and its date.
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                        # Catch-all for any other extraction failure.
                        # NOTE(review): sys.exc_type/sys.exc_value are
                        # deprecated Python 2 globals; sys.exc_info() is the
                        # supported spelling.
                        self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1252 ###########################################################################
1253 def check_signed_by_key(self):
1254 """Ensure the .changes is signed by an authorized uploader."""
1255 session = DBConn().session()
1257 (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
1259 # match claimed name with actual name:
1261 # This is fundamentally broken but need us to refactor how we get
1262 # the UIDs/Fingerprints in order for us to fix it properly
1263 uid, uid_email = self.pkg.changes["fingerprint"], uid
1264 may_nmu, may_sponsor = 1, 1
1265 # XXX by default new dds don't have a fingerprint/uid in the db atm,
1266 # and can't get one in there if we don't allow nmu/sponsorship
1267 elif is_dm is False:
1268 # If is_dm is False, we allow full upload rights
1269 uid_email = "%s@debian.org" % (uid)
1270 may_nmu, may_sponsor = 1, 1
1272 # Assume limited upload rights unless we've discovered otherwise
1274 may_nmu, may_sponsor = 0, 0
1276 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1278 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1280 if uid_name == "": sponsored = 1
1283 if ("source" in self.pkg.changes["architecture"] and
1284 uid_email and utils.is_email_alias(uid_email)):
1285 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1286 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1287 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1288 self.pkg.changes["sponsoremail"] = uid_email
1290 if sponsored and not may_sponsor:
1291 self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
1293 if not sponsored and not may_nmu:
1294 should_reject = True
1295 highest_sid, highest_version = None, None
1297 # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
1298 # It ignores higher versions with the dm_upload_allowed flag set to false
1299 # I'm keeping the existing behaviour for now until I've gone back and
1300 # checked exactly what the GR says - mhy
1301 for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
1302 if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
1303 highest_sid = si.source_id
1304 highest_version = si.version
1306 if highest_sid is None:
1307 self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
1309 for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
1310 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1311 if email == uid_email or name == uid_name:
1312 should_reject = False
1315 if should_reject is True:
1316 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
1318 for b in self.pkg.changes["binary"].keys():
1319 for suite in self.pkg.changes["distribution"].keys():
1320 q = session.query(DBSource)
1321 q = q.join(DBBinary).filter_by(package=b)
1322 q = q.join(BinAssociation).join(Suite).filter_by(suite)
1325 if s.source != self.pkg.changes["source"]:
1326 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
1328 for f in self.pkg.files.keys():
1329 if self.pkg.files[f].has_key("byhand"):
1330 self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
1331 if self.pkg.files[f].has_key("new"):
1332 self.rejects.append("%s may not upload NEW file %s" % (uid, f))
1334 ###########################################################################
1335 def build_summaries(self):
1336 """ Build a summary of changes the upload introduces. """
1338 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1340 short_summary = summary
1342 # This is for direport's benefit...
1343 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1346 summary += "Changes: " + f
1348 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1350 summary += self.announce(short_summary, 0)
1352 return (summary, short_summary)
1354 ###########################################################################
1356 def close_bugs(self, summary, action):
1358 Send mail to close bugs as instructed by the closes field in the changes file.
1359 Also add a line to summary if any work was done.
1361 @type summary: string
1362 @param summary: summary text, as given by L{build_summaries}
1365 @param action: Set to false no real action will be done.
1368 @return: summary. If action was taken, extended by the list of closed bugs.
1372 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1374 bugs = self.pkg.changes["closes"].keys()
1380 summary += "Closing bugs: "
1382 summary += "%s " % (bug)
1384 self.Subst["__BUG_NUMBER__"] = bug
1385 if self.pkg.changes["distribution"].has_key("stable"):
1386 self.Subst["__STABLE_WARNING__"] = """
1387 Note that this package is not part of the released stable Debian
1388 distribution. It may have dependencies on other unreleased software,
1389 or other instabilities. Please take care if you wish to install it.
1390 The update will eventually make its way into the next released Debian
1393 self.Subst["__STABLE_WARNING__"] = ""
1394 mail_message = utils.TemplateSubst(self.Subst, template)
1395 utils.send_mail(mail_message)
1397 # Clear up after ourselves
1398 del self.Subst["__BUG_NUMBER__"]
1399 del self.Subst["__STABLE_WARNING__"]
1401 if action and self.logger:
1402 self.logger.log(["closing bugs"] + bugs)
1408 ###########################################################################
1410 def announce(self, short_summary, action):
1412 Send an announce mail about a new upload.
1414 @type short_summary: string
1415 @param short_summary: Short summary text to include in the mail
1418 @param action: Set to false no real action will be done.
1421 @return: Textstring about action taken.
1426 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1428 # Only do announcements for source uploads with a recent dpkg-dev installed
1429 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1430 self.pkg.changes["architecture"].has_key("source"):
1436 self.Subst["__SHORT_SUMMARY__"] = short_summary
1438 for dist in self.pkg.changes["distribution"].keys():
1439 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1440 if announce_list == "" or lists_done.has_key(announce_list):
1443 lists_done[announce_list] = 1
1444 summary += "Announcing to %s\n" % (announce_list)
1447 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1448 if cnf.get("Dinstall::TrackingServer") and \
1449 self.pkg.changes["architecture"].has_key("source"):
1450 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1451 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1453 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1454 utils.send_mail(mail_message)
1456 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1458 if cnf.FindB("Dinstall::CloseBugs"):
1459 summary = self.close_bugs(summary, action)
1461 del self.Subst["__SHORT_SUMMARY__"]
1465 ###########################################################################
1467 def accept (self, summary, short_summary, targetdir=None):
1471 This moves all files referenced from the .changes into the I{accepted}
1472 queue, sends the accepted mail, announces to lists, closes bugs and
1473 also checks for override disparities. If enabled it will write out
1474 the version history for the BTS Version Tracking and will finally call
1477 @type summary: string
1478 @param summary: Summary text
1480 @type short_summary: string
1481 @param short_summary: Short summary
1486 stats = SummaryStats()
1488 accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
1490 if targetdir is None:
1491 targetdir = cnf["Dir::Queue::Accepted"]
1495 self.logger.log(["Accepting changes", self.pkg.changes_file])
1497 self.pkg.write_dot_dak(targetdir)
1499 # Move all the files into the accepted directory
1500 utils.move(self.pkg.changes_file, targetdir)
1502 for name, entry in sorted(self.pkg.files.items()):
1503 utils.move(name, targetdir)
1504 stats.accept_bytes += float(entry["size"])
1506 stats.accept_count += 1
1508 # Send accept mail, announce to lists, close bugs and check for
1509 # override disparities
1510 if not cnf["Dinstall::Options::No-Mail"]:
1511 self.Subst["__SUITE__"] = ""
1512 self.Subst["__SUMMARY__"] = summary
1513 mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
1514 utils.send_mail(mail_message)
1515 self.announce(short_summary, 1)
1517 ## Helper stuff for DebBugs Version Tracking
1518 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1519 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1520 # the conditionalization on dsc["bts changelog"] should be
1523 # Write out the version history from the changelog
1524 if self.pkg.changes["architecture"].has_key("source") and \
1525 self.pkg.dsc.has_key("bts changelog"):
1527 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1528 version_history = os.fdopen(fd, 'w')
1529 version_history.write(self.pkg.dsc["bts changelog"])
1530 version_history.close()
1531 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1532 self.pkg.changes_file[:-8]+".versions")
1533 os.rename(temp_filename, filename)
1534 os.chmod(filename, 0644)
1536 # Write out the binary -> source mapping.
1537 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1538 debinfo = os.fdopen(fd, 'w')
1539 for name, entry in sorted(self.pkg.files.items()):
1540 if entry["type"] == "deb":
1541 line = " ".join([entry["package"], entry["version"],
1542 entry["architecture"], entry["source package"],
1543 entry["source version"]])
1544 debinfo.write(line+"\n")
1546 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1547 self.pkg.changes_file[:-8]+".debinfo")
1548 os.rename(temp_filename, filename)
1549 os.chmod(filename, 0644)
1551 # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
1552 # <Ganneff> we do call queue_build too
1553 # <mhy> well yes, we'd have had to if we were inserting into accepted
1554 # <Ganneff> now. thats database only.
1555 # <mhy> urgh, that's going to get messy
1556 # <Ganneff> so i make the p-n call to it *also* using accepted/
1557 # <mhy> but then the packages will be in the queue_build table without the files being there
1558 # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
1559 # <mhy> ah, good point
1560 # <Ganneff> so it will work out, as unchecked move it over
1561 # <mhy> that's all completely sick
1564 # This routine returns None on success or an error on failure
1565 res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
1570 def check_override(self):
1572 Checks override entries for validity. Mails "Override disparity" warnings,
1573 if that feature is enabled.
1575 Abandons the check if
1576 - override disparity checks are disabled
1577 - mail sending is disabled
1582 # Abandon the check if:
1583 # a) override disparity checks have been disabled
1584 # b) we're not sending mail
1585 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1586 cnf["Dinstall::Options::No-Mail"]:
1589 summary = self.pkg.check_override()
1594 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1596 self.Subst["__SUMMARY__"] = summary
1597 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1598 utils.send_mail(mail_message)
1599 del self.Subst["__SUMMARY__"]
1601 ###########################################################################
1603 def remove(self, dir=None):
1605 Used (for instance) in p-u to remove the package from unchecked
1608 os.chdir(self.pkg.directory)
1612 for f in self.pkg.files.keys():
1614 os.unlink(self.pkg.changes_file)
1616 ###########################################################################
1618 def move_to_dir (self, dest, perms=0660, changesperms=0664):
1620 Move files to dest with certain perms/changesperms
1622 utils.move(self.pkg.changes_file, dest, perms=changesperms)
1623 for f in self.pkg.files.keys():
1624 utils.move(f, dest, perms=perms)
1626 ###########################################################################
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: iterable
        @param reject_files: names of the files to move into the reject queue
        """

        # NOTE(review): verify 'cnf' is bound (Config()) before this point.
        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

                # O_EXCL: creation fails if a reject file of this name
                # already exists, which routes us into the morgue handling.
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
                # File exists? Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                        # Pick a free (numbered) name in the morgue.
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))

                    # The old reject file is out of the way; retry claiming
                    # the destination slot.
                    utils.move(dest_file, morgue_file, perms=0660)
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
1676 ###########################################################################
1677 def do_reject (self, manual=0, reject_message="", note=""):
1679 Reject an upload. If called without a reject message or C{manual} is
1680 true, spawn an editor so the user can write one.
1683 @param manual: manual or automated rejection
1685 @type reject_message: string
1686 @param reject_message: A reject message
1691 # If we weren't given a manual rejection message, spawn an
1692 # editor so the user can add one in...
1693 if manual and not reject_message:
1694 (fd, temp_filename) = utils.temp_filename()
1695 temp_file = os.fdopen(fd, 'w')
1698 temp_file.write(line)
1700 editor = os.environ.get("EDITOR","vi")
1702 while answer == 'E':
1703 os.system("%s %s" % (editor, temp_filename))
1704 temp_fh = utils.open_file(temp_filename)
1705 reject_message = "".join(temp_fh.readlines())
1707 print "Reject message:"
1708 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
1709 prompt = "[R]eject, Edit, Abandon, Quit ?"
1711 while prompt.find(answer) == -1:
1712 answer = utils.our_raw_input(prompt)
1713 m = re_default_answer.search(prompt)
1716 answer = answer[:1].upper()
1717 os.unlink(temp_filename)
1723 print "Rejecting.\n"
1727 reason_filename = self.pkg.changes_file[:-8] + ".reason"
1728 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
1730 # Move all the files into the reject directory
1731 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
1732 self.force_reject(reject_files)
1734 # If we fail here someone is probably trying to exploit the race
1735 # so let's just raise an exception ...
1736 if os.path.exists(reason_filename):
1737 os.unlink(reason_filename)
1738 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1740 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
1743 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
1744 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
1745 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
1746 os.write(reason_fd, reject_message)
1747 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1749 # Build up the rejection email
1750 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
1751 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
1752 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
1753 self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
1754 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
1755 # Write the rejection email out as the <foo>.reason file
1756 os.write(reason_fd, reject_mail_message)
1758 del self.Subst["__REJECTOR_ADDRESS__"]
1759 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
1760 del self.Subst["__CC__"]
1764 # Send the rejection mail if appropriate
1765 if not cnf["Dinstall::Options::No-Mail"]:
1766 utils.send_mail(reject_mail_message)
1769 self.logger.log(["rejected", pkg.changes_file])
1773 ################################################################################
1774 def in_override_p(self, package, component, suite, binary_type, file, session=None):
1776 Check if a package already has override entries in the DB
1778 @type package: string
1779 @param package: package name
1781 @type component: string
1782 @param component: database id of the component
1785 @param suite: database id of the suite
1787 @type binary_type: string
1788 @param binary_type: type of the package
1791 @param file: filename we check
1793 @return: the database result. But noone cares anyway.
1800 session = DBConn().session()
1802 if binary_type == "": # must be source
1805 file_type = binary_type
1807 # Override suite name; used for example with proposed-updates
1808 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
1809 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
1811 result = get_override(package, suite, component, file_type, session)
1813 # If checking for a source package fall back on the binary override type
1814 if file_type == "dsc" and len(result) < 1:
1815 result = get_override(package, suite, component, ['deb', 'udeb'], session)
1817 # Remember the section and priority so we can check them later if appropriate
1820 self.pkg.files[file]["override section"] = result.section.section
1821 self.pkg.files[file]["override priority"] = result.priority.priority
1826 ################################################################################
1827 def get_anyversion(self, sv_list, suite):
1830 @param sv_list: list of (suite, version) tuples to check
1833 @param suite: suite name
1838 anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
1839 for (s, v) in sv_list:
1840 if s in [ x.lower() for x in anysuite ]:
1841 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
1846 ################################################################################
1848 def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
1851 @param sv_list: list of (suite, version) tuples to check
1856 @type new_version: string
1857 @param new_version: XXX
1859 Ensure versions are newer than existing packages in target
1860 suites and that cross-suite version checking rules as
1861 set out in the conf file are satisfied.
1866 # Check versions for each target suite
1867 for target_suite in self.pkg.changes["distribution"].keys():
1868 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
1869 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
1871 # Enforce "must be newer than target suite" even if conffile omits it
1872 if target_suite not in must_be_newer_than:
1873 must_be_newer_than.append(target_suite)
1875 for (suite, existent_version) in sv_list:
1876 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
1878 if suite in must_be_newer_than and sourceful and vercmp < 1:
1879 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1881 if suite in must_be_older_than and vercmp > -1:
1884 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
1885 # we really use the other suite, ignoring the conflicting one ...
1886 addsuite = self.pkg.changes["distribution-version"][suite]
1888 add_version = self.get_anyversion(sv_list, addsuite)
1889 target_version = self.get_anyversion(sv_list, target_suite)
1892 # not add_version can only happen if we map to a suite
1893 # that doesn't enhance the suite we're propup'ing from.
1894 # so "propup-ver x a b c; map a d" is a problem only if
1895 # d doesn't enhance a.
1897 # i think we could always propagate in this case, rather
1898 # than complaining. either way, this isn't a REJECT issue
1900 # And - we really should complain to the dorks who configured dak
1901 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
1902 self.pkg.changes.setdefault("propdistribution", {})
1903 self.pkg.changes["propdistribution"][addsuite] = 1
1905 elif not target_version:
1906 # not targets_version is true when the package is NEW
1907 # we could just stick with the "...old version..." REJECT
1908 # for this, I think.
1909 self.rejects.append("Won't propogate NEW packages.")
1910 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
1911 # propogation would be redundant. no need to reject though.
1912 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1914 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
1915 apt_pkg.VersionCompare(add_version, target_version) >= 0:
1917 self.warnings.append("Propogating upload to %s" % (addsuite))
1918 self.pkg.changes.setdefault("propdistribution", {})
1919 self.pkg.changes["propdistribution"][addsuite] = 1
1923 self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
1925 ################################################################################
1926 def check_binary_against_db(self, file, session=None):
1928 session = DBConn().session()
1930 # Ensure version is sane
1931 q = session.query(BinAssociation)
1932 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
1933 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
1935 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
1936 file, self.pkg.files[file]["version"], sourceful=False)
1938 # Check for any existing copies of the file
1939 q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
1940 q = q.filter_by(version=self.pkg.files[file]["version"])
1941 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])
1944 self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
1946 ################################################################################
1948 def check_source_against_db(self, file, session=None):
1952 session = DBConn().session()
1954 source = self.pkg.dsc.get("source")
1955 version = self.pkg.dsc.get("version")
1957 # Ensure version is sane
1958 q = session.query(SrcAssociation)
1959 q = q.join(DBSource).filter(DBSource.source==source)
1961 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
1962 file, version, sourceful=True)
1964 ################################################################################
1965 def check_dsc_against_db(self, file, session=None):
1968 @warning: NB: this function can remove entries from the 'files' index [if
1969 the .orig.tar.gz is a duplicate of the one in the archive]; if
1970 you're iterating over 'files' and call this function as part of
1971 the loop, be sure to add a check to the top of the loop to
1972 ensure you haven't just tried to dereference the deleted entry.
1977 session = DBConn().session()
1979 self.pkg.orig_tar_gz = None
1981 # Try and find all files mentioned in the .dsc. This has
1982 # to work harder to cope with the multiple possible
1983 # locations of an .orig.tar.gz.
1984 # The ordering on the select is needed to pick the newest orig
1985 # when it exists in multiple places.
1986 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
1988 if self.pkg.files.has_key(dsc_name):
1989 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
1990 actual_size = int(self.pkg.files[dsc_name]["size"])
1991 found = "%s in incoming" % (dsc_name)
1993 # Check the file does not already exist in the archive
1994 ql = get_poolfile_like_name(dsc_name)
1996 # Strip out anything that isn't '%s' or '/%s$'
1998 if not i.filename.endswith(dsc_name):
2001 # "[dak] has not broken them. [dak] has fixed a
2002 # brokenness. Your crappy hack exploited a bug in
2005 # "(Come on! I thought it was always obvious that
2006 # one just doesn't release different files with
2007 # the same name and version.)"
2008 # -- ajk@ on d-devel@l.d.o
2011 # Ignore exact matches for .orig.tar.gz
2013 if dsc_name.endswith(".orig.tar.gz"):
2015 if self.pkg.files.has_key(dsc_name) and \
2016 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2017 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2018 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2019 # TODO: Don't delete the entry, just mark it as not needed
2020 # This would fix the stupidity of changing something we often iterate over
2021 # whilst we're doing it
2022 del self.pkg.files[dsc_name]
2023 self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
2027 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2029 elif dsc_name.endswith(".orig.tar.gz"):
2031 ql = get_poolfile_like_name(dsc_name, session)
2033 # Strip out anything that isn't '%s' or '/%s$'
2034 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2036 if not i.filename.endswith(dsc_name):
2040 # Unfortunately, we may get more than one match here if,
2041 # for example, the package was in potato but had an -sa
2042 # upload in woody. So we need to choose the right one.
2044 # default to something sane in case we don't match any or have only one
2049 old_file = os.path.join(i.location.path, i.filename)
2050 old_file_fh = utils.open_file(old_file)
2051 actual_md5 = apt_pkg.md5sum(old_file_fh)
2053 actual_size = os.stat(old_file)[stat.ST_SIZE]
2054 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2057 old_file = os.path.join(i.location.path, i.filename)
2058 old_file_fh = utils.open_file(old_file)
2059 actual_md5 = apt_pkg.md5sum(old_file_fh)
2061 actual_size = os.stat(old_file)[stat.ST_SIZE]
2063 suite_type = f.location.archive_type
2064 # need this for updating dsc_files in install()
2065 dsc_entry["files id"] = f.file_id
2066 # See install() in process-accepted...
2067 self.pkg.orig_tar_id = f.file_id
2068 self.pkg.orig_tar_gz = old_file
2069 self.pkg.orig_tar_location = f.location.location_id
2071 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2072 # Not there? Check the queue directories...
2073 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2074 in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2075 if os.path.exists(in_otherdir):
2076 in_otherdir_fh = utils.open_file(in_otherdir)
2077 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2078 in_otherdir_fh.close()
2079 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2081 self.pkg.orig_tar_gz = in_otherdir
2084 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
2085 self.pkg.orig_tar_gz = -1
2088 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
2090 if actual_md5 != dsc_entry["md5sum"]:
2091 self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
2092 if actual_size != int(dsc_entry["size"]):
2093 self.rejects.append("size for %s doesn't match %s." % (found, file))
2095 ################################################################################
2096 def accepted_checks(self, overwrite_checks=True, session=None):
2097 # Recheck anything that relies on the database; since that's not
2098 # frozen between accept and our run time when called from p-a.
2100 # overwrite_checks is set to False when installing to stable/oldstable
2103 session = DBConn().session()
2108 for checkfile in self.pkg.files.keys():
2109 # The .orig.tar.gz can disappear out from under us is it's a
2110 # duplicate of one in the archive.
2111 if not self.pkg.files.has_key(checkfile):
2114 entry = self.pkg.files[checkfile]
2116 # Check that the source still exists
2117 if entry["type"] == "deb":
2118 source_version = entry["source version"]
2119 source_package = entry["source package"]
2120 if not self.pkg.changes["architecture"].has_key("source") \
2121 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2122 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2124 # Version and file overwrite checks
2125 if overwrite_checks:
2126 if entry["type"] == "deb":
2127 self.check_binary_against_db(checkfile, session)
2128 elif entry["type"] == "dsc":
2129 self.check_source_against_db(checkfile, session)
2130 self.check_dsc_against_db(dsc_filename, session)
2132 # propogate in the case it is in the override tables:
2133 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2134 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile):
2135 propogate[suite] = 1
2137 nopropogate[suite] = 1
2139 for suite in propogate.keys():
2140 if suite in nopropogate:
2142 self.pkg.changes["distribution"][suite] = 1
2144 for checkfile in self.pkg.files.keys():
2145 # Check the package is still in the override tables
2146 for suite in self.pkg.changes["distribution"].keys():
2147 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile):
2148 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2150 ################################################################################
2151 # This is not really a reject, but an unaccept, but since a) the code for
2152 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2153 # extremely rare, for now we'll go with whining at our admin folks...
2155 def do_unaccept(self):
2158 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2159 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2160 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2161 self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
2162 if cnf.has_key("Dinstall::Bcc"):
2163 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2165 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2167 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2169 # Write the rejection email out as the <foo>.reason file
2170 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2171 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2173 # If we fail here someone is probably trying to exploit the race
2174 # so let's just raise an exception ...
2175 if os.path.exists(reject_filename):
2176 os.unlink(reject_filename)
2178 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2179 os.write(fd, reject_mail_message)
2182 utils.send_mail(reject_mail_message)
2184 del self.Subst["__REJECTOR_ADDRESS__"]
2185 del self.Subst["__REJECT_MESSAGE__"]
2186 del self.Subst["__CC__"]
2188 ################################################################################
2189 # If any file of an upload has a recent mtime then chances are good
2190 # the file is still being uploaded.
2192 def upload_too_new(self):
2195 # Move back to the original directory to get accurate time stamps
2197 os.chdir(self.pkg.directory)
2198 file_list = self.pkg.files.keys()
2199 file_list.extend(self.pkg.dsc_files.keys())
2200 file_list.append(self.pkg.changes_file)
2203 last_modified = time.time()-os.path.getmtime(f)
2204 if last_modified < int(cnf["Dinstall::SkipTime"]):