Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
###############################################################################
import errno
import os
import re
import stat
import sys
import time
import commands
import shutil
import textwrap
import yaml
import apt_inst
import apt_pkg
import utils

from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
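
# A minimal usage sketch of get_type (illustrative, not part of dak's public
# API): given a files entry whose "type" matches re_source_ext, it validates
# that an override type exists and hands back the file type string, e.g.
#
#     session = DBConn().session()
#     assert get_type({"type": "dsc"}, session) == "dsc"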
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]
    # Drop packages that already have an override in every target suite
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]
    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
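
# For orientation, the dict determine_new returns has one entry per package
# lacking an override in every target suite; an illustrative (assumed) shape:
#
#     { "foo": { "priority": "optional", "section": "utils", "type": "deb",
#                "component": "main", "files": ["foo_1.0-1_i386.deb"] } }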
################################################################################
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages cannot be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
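
# A quick sketch of check_valid's contract (values illustrative): entries
# that fail validation keep their package key but get sentinel ids, e.g.
#
#     new = { "foo": { "section": "nosuchsection", "priority": "optional",
#                      "type": "deb", "component": "main", "files": [] } }
#     check_valid(new)
#     # new["foo"]["section id"] is now -1, flagging it for manual review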
###############################################################################
def check_status(files):
    new = byhand = 0
    for f in files.keys():
        if files[f]["type"] == "byhand":
            byhand = 1
        elif files[f].has_key("new"):
            new = 1
    return (new, byhand)

###############################################################################
# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
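
# TarTime is driven through apt_inst's tar extraction callback; a hedged
# usage sketch (mirroring Upload.check_timestamps below):
#
#     tar = TarTime(time.time() + 86400,
#                   time.mktime(time.strptime("1975", "%Y")))
#     apt_inst.debExtract(utils.open_file("foo.deb"), tar.callback, "data.tar.gz")
#     print tar.future_files, tar.ancient_files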
###############################################################################
class Upload(object):
    """
    Everything that has to do with processing an upload.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################
    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))

        return msg
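
    # For example (illustrative), with self.rejects == ["foo.deb: invalid
    # version number '!'"] and everything else empty, package_info() yields:
    #
    #     "\n\nReject Reasons:\nfoo.deb: invalid version number '!'"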
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }
        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
    ###########################################################################
    def load_changes(self, filename):
        """
        @type filename: string
        @param filename: the .changes file to load

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False
        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False
        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1
        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""
            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                                % (filename, self.pkg.changes.get("changed-by", ""), msg))
        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
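
        # e.g. (illustrative) Version "1:2.6-1" gives chopversion "2.6-1" and
        # chopversion2 "2.6" - the latter is what an .orig.tar.gz carries.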
        # Check there isn't already a changes file of the same name in one
        # of the queue directories.
        base_filename = os.path.basename(filename)
        if get_knownchange(base_filename):
            self.rejects.append("%s: a file with this name already exists." % (base_filename))

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
475 elif mtype == "map-unreleased":
476 (source, dest) = args[1:3]
477 if self.pkg.changes["distribution"].has_key(source):
478 for arch in self.pkg.changes["architecture"].keys():
479 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
480 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
481 del self.pkg.changes["distribution"][source]
482 self.pkg.changes["distribution"][dest] = 1
484 elif mtype == "ignore":
486 if self.pkg.changes["distribution"].has_key(suite):
487 del self.pkg.changes["distribution"][suite]
488 self.warnings.append("Ignoring %s as a target suite." % (suite))
489 elif mtype == "reject":
491 if self.pkg.changes["distribution"].has_key(suite):
492 self.rejects.append("Uploads to %s are not accepted." % (suite))
493 elif mtype == "propup-version":
494 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
496 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
497 if self.pkg.changes["distribution"].has_key(args[1]):
498 self.pkg.changes.setdefault("distribution-version", {})
499 for suite in args[2:]:
500 self.pkg.changes["distribution-version"][suite] = suite
        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
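
    # For reference, SuiteMappings entries are whitespace-separated words; an
    # illustrative (assumed) dak.conf stanza:
    #
    #     SuiteMappings
    #     {
    #       "map stable proposed-updates";
    #       "map-unreleased testing unstable";
    #       "propup-version stable-security testing";
    #     };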
    ###########################################################################
    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()
        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))
        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))
598 entry["package"] = package
599 entry["architecture"] = architecture
600 entry["version"] = version
601 entry["maintainer"] = control.Find("Maintainer", "")
603 if f.endswith(".udeb"):
604 self.pkg.files[f]["dbtype"] = "udeb"
605 elif f.endswith(".deb"):
606 self.pkg.files[f]["dbtype"] = "deb"
608 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
610 entry["source"] = control.Find("Source", entry["package"])
612 # Get the source version
613 source = entry["source"]
616 if source.find("(") != -1:
617 m = re_extract_src_version.match(source)
619 source_version = m.group(2)
621 if not source_version:
622 source_version = self.pkg.files[f]["version"]
624 entry["source package"] = source
625 entry["source version"] = source_version
        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))
        # Check for existing source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]
        archive = utils.where_am_i()

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return
        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest
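
        # A ComponentMappings entry is simply "<source> <dest>"; an
        # illustrative (assumed) dak.conf value:
        #
        #     ComponentMappings { "contrib/python contrib"; };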
        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], archive, session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        (found, poolfile) = check_poolfile(os.path.join(entry["pool name"], f),
                                           entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id
        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

            os.chdir(cwd)
        # Check there isn't already a .changes file of the same name in
        # the proposed-updates "CopyChanges" storage directories.
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        for suite in self.pkg.changes["distribution"].keys():
            copychanges = "Suite::%s::CopyChanges" % (suite)
            if cnf.has_key(copychanges) and \
               os.path.exists(os.path.join(cnf[copychanges], base_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (base_filename, cnf[copychanges]))
        has_binaries = False
        has_source = False

        session = DBConn().session()

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)):
                    continue
                if os.path.exists(cnf["Dir::Queue::%s" % (d)] + '/' + f):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"
            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()
        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False
        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False
        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats is allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contains only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True

    ###########################################################################
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return
        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return
        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        # or b) the orig files are MIA
        if not self.pkg.changes["architecture"].has_key("source") \
           or len(self.pkg.orig_files) == 0:
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)
        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "rmtree failed for %s: %s" % (tmpdir, e)
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0
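
        # e.g. a changes file declaring "Format: 1.8" yields format == (1, 8),
        # while a bare "1" (no minor part) yields (1, 0)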
        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]
        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than requiring it to exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()

    ###########################################################################
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir.  If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed later).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True
            session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            session_.close()

            if found:
                continue
            # Look in some other queues for the file
            queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
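
    # A hedged usage sketch: callers typically symlink the origs next to the
    # .changes, run their checks, then undo the links (as check_lintian below
    # does), e.g.
    #
    #     symlinked = upload.ensure_orig()
    #     ... run lintian / dpkg-source against the upload ...
    #     for link in symlinked:
    #         os.unlink(link)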
    ###########################################################################
    def check_lintian(self):
        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        valid_dist = False
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                valid_dist = True
                break

        if not valid_dist:
            return

        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            # We don't have a tagfile, so just don't do anything.
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return
        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Now setup the input file for lintian. lintian wants "one tag per line" only,
        # so we build it up that way.  We put all types of tags in one file and then
        # sort through lintian's output later to see if it's a fatal tag we detected, or not.
        # So we only run lintian once on all tags, even if we might reject on some, but not
        # on others.
        # Additionally build up a set of tags
        tags = set()
        (fd, temp_filename) = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tagtype in lintiantags:
            for tag in lintiantags[tagtype]:
                temptagfile.write("%s\n" % tag)
                tags.add(tag)
        temptagfile.close()
        # So now we should look at running lintian at the .changes file, capturing output
        # to then parse it.
        command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
        (result, output) = commands.getstatusoutput(command)

        # We are done with lintian, remove our tempfile and any symlinks we created
        os.unlink(temp_filename)
        for symlink in symlinked:
            os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))

        if len(output) == 0:
            return

        def log(*txt):
            if self.logger:
                self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
        # We have output of lintian, this package isn't clean.  Let's parse it and see
        # if we have a victim for a reject.
        # W: tzdata: binary-without-manpage usr/sbin/tzconfig
        for line in output.split('\n'):
            m = re_parse_lintian.match(line)
            if m is None:
                continue

            etype = m.group(1)
            epackage = m.group(2)
            etag = m.group(3)
            etext = m.group(4)

            # So let's check if we know the tag at all.
            if etag not in tags:
                continue

            if etype == 'O':
                # We know it and it is overridden.  Check that the override is allowed.
                if etag in lintiantags['warning']:
                    # The tag is overridden, and it is allowed to be overridden.
                    # Don't add a reject message.
                    pass
                elif etag in lintiantags['error']:
                    # The tag is overridden - but is not allowed to be
                    self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
                    log("ftpmaster does not allow tag to be overridable", etag)
            else:
                # Tag is known, it is not overridden, direct reject.
                self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
                # Now tell if they *might* override it.
                if etag in lintiantags['warning']:
                    log("auto rejecting", "overridable", etag)
                    self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
                else:
                    log("auto rejecting", "not overridable", etag)
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)
        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()
                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source"):
                if fpr.source_acl.access_level is None:
                    rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                    rej += '\nPlease contact ftpmaster if you think this is incorrect'
                    self.rejects.append(rej)
                    return

                # If not a DM, we allow full upload rights
                uid_email = "%s@debian.org" % (fpr.uid.uid)
                self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # the normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
    def check_dm_upload(self, fpr, session):
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return
        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return
        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return
        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existent) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return
        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe rather than end up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return
        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet
                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, let's reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
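
    # For reference, the transitions file is a small YAML document; an
    # illustrative (assumed, not normative) entry:
    #
    #     libfoo_transition:
    #       source: libfoo
    #       new: 1.2-1
    #       rm: Some Releaser
    #       reason: "libfoo soname bump"
    #       packages:
    #         - libfoo
    #         - bar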
    ###########################################################################
    # End check_signed_by_key checks
    ###########################################################################
1691 def build_summaries(self):
1692 """ Build a summary of changes the upload introduces. """
1694 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1696 short_summary = summary
1698 # This is for direport's benefit...
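# (re_fdnic presumably collapses blank lines in the changelog into the
# RFC822 "full stop" continuation form so the mail body stays intact.)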
1699 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1702 summary += "Changes: " + f
1704 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1706 summary += self.announce(short_summary, 0)
1708 return (summary, short_summary)
1710 ###########################################################################
1712 def close_bugs(self, summary, action):
1714 Send mail to close bugs as instructed by the closes field in the changes file.
1715 Also add a line to summary if any work was done.
1717 @type summary: string
1718 @param summary: summary text, as given by L{build_summaries}
1721 @param action: if set to false, no real action will be taken.
1724 @return: summary. If action was taken, extended by the list of closed bugs.
1728 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1730 bugs = self.pkg.changes["closes"].keys()
1736 summary += "Closing bugs: "
1738 summary += "%s " % (bug)
1741 self.Subst["__BUG_NUMBER__"] = bug
1742 if self.pkg.changes["distribution"].has_key("stable"):
1743 self.Subst["__STABLE_WARNING__"] = """
1744 Note that this package is not part of the released stable Debian
1745 distribution. It may have dependencies on other unreleased software,
1746 or other instabilities. Please take care if you wish to install it.
1747 The update will eventually make its way into the next released Debian
1750 self.Subst["__STABLE_WARNING__"] = ""
1751 mail_message = utils.TemplateSubst(self.Subst, template)
1752 utils.send_mail(mail_message)
1754 # Clear up after ourselves
1755 del self.Subst["__BUG_NUMBER__"]
1756 del self.Subst["__STABLE_WARNING__"]
1758 if action and self.logger:
1759 self.logger.log(["closing bugs"] + bugs)
1765 ###########################################################################
1767 def announce(self, short_summary, action):
1769 Send an announce mail about a new upload.
1771 @type short_summary: string
1772 @param short_summary: Short summary text to include in the mail
1775 @param action: if set to false, no real action will be taken.
1778 @return: text string describing the action taken.
1783 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1785 # Only do announcements for source uploads with a recent dpkg-dev installed
1786 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1787 self.pkg.changes["architecture"].has_key("source"):
1793 self.Subst["__SHORT_SUMMARY__"] = short_summary
1795 for dist in self.pkg.changes["distribution"].keys():
1796 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1797 if announce_list == "" or lists_done.has_key(announce_list):
1800 lists_done[announce_list] = 1
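# Remember this list so that a second suite sharing the same announce
# address doesn't trigger a duplicate mail.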
1801 summary += "Announcing to %s\n" % (announce_list)
1805 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
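# Also Bcc the tracking address (source@TrackingServer, presumably the
# PTS) for sourceful uploads so it sees the announcement too.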
1806 if cnf.get("Dinstall::TrackingServer") and \
1807 self.pkg.changes["architecture"].has_key("source"):
1808 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1809 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1811 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1812 utils.send_mail(mail_message)
1814 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1816 if cnf.FindB("Dinstall::CloseBugs"):
1817 summary = self.close_bugs(summary, action)
1819 del self.Subst["__SHORT_SUMMARY__"]
1823 ###########################################################################
1825 def accept (self, summary, short_summary, session):
1829 This moves all files referenced from the .changes into the pool,
1830 sends the accepted mail, announces to lists, closes bugs and
1831 also checks for override disparities. If enabled it will write out
1832 the version history for the BTS Version Tracking and will finally call
1835 @type summary: string
1836 @param summary: Summary text
1838 @type short_summary: string
1839 @param short_summary: Short summary
1843 stats = SummaryStats()
1846 self.logger.log(["installing changes", self.pkg.changes_file])
1848 # Add the .dsc file to the DB first
1849 for newfile, entry in self.pkg.files.items():
1850 if entry["type"] == "dsc":
1851 dsc_component, dsc_location_id = add_dsc_to_db(self, newfile, session)
1853 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1854 for newfile, entry in self.pkg.files.items():
1855 if entry["type"] == "deb":
1856 add_deb_to_db(self, newfile, session)
1858 # If this is a sourceful diff-only upload that is moving
1859 # cross-component we need to copy the .orig files into the new
1860 # component too for the same reasons as above.
1861 if self.pkg.changes["architecture"].has_key("source"):
1862 for orig_file in self.pkg.orig_files.keys():
1863 if not self.pkg.orig_files[orig_file].has_key("id"):
1864 continue # Skip if it's not in the pool
1865 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1866 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1867 continue # Skip if the location didn't change
1870 oldf = get_poolfile_by_id(orig_file_id, session)
1871 old_filename = os.path.join(oldf.location.path, oldf.filename)
1872 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1873 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
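# utils.poolify gives the pool subdirectory for (source, component),
# e.g. pool/main/h/hello/.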
1875 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1877 # TODO: Care about size/md5sum collisions etc
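# check_poolfile returns (found, poolfile); if the orig isn't already
# known at the new location we copy it into the pool and register it
# below.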
1878 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1881 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1882 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1884 # TODO: Check that there's only 1 here
1885 source = get_sources_from_name(self.pkg.changes["source"], self.pkg.changes["version"])[0]
1886 dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
1887 dscf.poolfile_id = newf.file_id
1891 # Install the files into the pool
1892 for newfile, entry in self.pkg.files.items():
1893 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1894 utils.move(newfile, destination)
1895 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1896 stats.accept_bytes += float(entry["size"])
1898 # Copy the .changes file across for suites which need it.
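# (A dict is used so several suites sharing a CopyChanges target only
# produce one copy.)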
1900 for suite_name in self.pkg.changes["distribution"].keys():
1901 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1902 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1904 for dest in copy_changes.keys():
1905 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1907 # We're done - commit the database changes
1909 # Our SQL session will automatically start a new transaction after
1912 # Move the .changes into the 'done' directory
1913 utils.move(self.pkg.changes_file,
1914 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1916 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1917 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1919 # Send accept mail, announce to lists, close bugs and check for
1920 # override disparities
1921 if not cnf["Dinstall::Options::No-Mail"]:
1923 self.Subst["__SUITE__"] = ""
1924 self.Subst["__SUMMARY__"] = summary
1925 mail_message = utils.TemplateSubst(self.Subst,
1926 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1927 utils.send_mail(mail_message)
1928 self.announce(short_summary, 1)
1930 ## Helper stuff for DebBugs Version Tracking
1931 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1932 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1933 # the conditionalization on dsc["bts changelog"] should be
1936 # Write out the version history from the changelog
1937 if self.pkg.changes["architecture"].has_key("source") and \
1938 self.pkg.dsc.has_key("bts changelog"):
1940 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1941 version_history = os.fdopen(fd, 'w')
1942 version_history.write(self.pkg.dsc["bts changelog"])
1943 version_history.close()
1944 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1945 self.pkg.changes_file[:-8]+".versions")
1946 os.rename(temp_filename, filename)
1947 os.chmod(filename, 0644)
1949 # Write out the binary -> source mapping.
1950 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1951 debinfo = os.fdopen(fd, 'w')
1952 for name, entry in sorted(self.pkg.files.items()):
1953 if entry["type"] == "deb":
1954 line = " ".join([entry["package"], entry["version"],
1955 entry["architecture"], entry["source package"],
1956 entry["source version"]])
1957 debinfo.write(line+"\n")
1959 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1960 self.pkg.changes_file[:-8]+".debinfo")
1961 os.rename(temp_filename, filename)
1962 os.chmod(filename, 0644)
1965 # res = get_or_set_queue('buildd', session).autobuild_upload(self.pkg, session)
1968 # now_date = datetime.now()
1973 stats.accept_count += 1
1975 def check_override(self):
1977 Checks override entries for validity. Mails "Override disparity" warnings,
1978 if that feature is enabled.
1980 Abandons the check if
1981 - override disparity checks are disabled
1982 - mail sending is disabled
1987 # Abandon the check if:
1988 # a) override disparity checks have been disabled
1989 # b) we're not sending mail
1990 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1991 cnf["Dinstall::Options::No-Mail"]:
1994 summary = self.pkg.check_override()
1999 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2002 self.Subst["__SUMMARY__"] = summary
2003 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2004 utils.send_mail(mail_message)
2005 del self.Subst["__SUMMARY__"]
2007 ###########################################################################
2009 def remove(self, from_dir=None):
2011 Used (for instance) in p-u to remove the package from unchecked.
2013 Also removes the package from the holding area.
2015 if from_dir is None:
2016 from_dir = self.pkg.directory
2019 for f in self.pkg.files.keys():
2020 os.unlink(os.path.join(from_dir, f))
2021 if os.path.exists(os.path.join(h.holding_dir, f)):
2022 os.unlink(os.path.join(h.holding_dir, f))
2024 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2025 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2026 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2028 ###########################################################################
2030 def move_to_dir (self, dest, perms=0660, changesperms=0664):
2032 Move files to dest with the given perms; the .changes file uses changesperms.
2035 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2036 dest, perms=changesperms)
2037 for f in self.pkg.files.keys():
2038 utils.move(os.path.join(h.holding_dir, f), dest, perms=perms)
2040 ###########################################################################
2042 def force_reject(self, reject_files):
2044 Forcefully move files from the current directory to the
2045 reject directory. If any file already exists in the reject
2046 directory it will be moved to the morgue to make way for
2050 @param reject_files: file dictionary
2056 for file_entry in reject_files:
2057 # Skip any files which don't exist or which we don't have permission to copy.
2058 if os.access(file_entry, os.R_OK) == 0:
2061 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
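# Opening with O_CREAT|O_EXCL atomically claims the name in the reject
# directory; if it already exists we fall into the morgue dance below.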
2064 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2066 # File exists? Let's try and move it to the morgue
2067 if e.errno == errno.EEXIST:
2068 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2070 morgue_file = utils.find_next_free(morgue_file)
2071 except NoFreeFilenameError:
2072 # Something's either gone badly Pete Tong, or
2073 # someone is trying to exploit us.
2074 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2076 utils.move(dest_file, morgue_file, perms=0660)
2078 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2081 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2085 # If we got here, we own the destination file, so we can
2086 # safely overwrite it.
2087 utils.move(file_entry, dest_file, 1, perms=0660)
2090 ###########################################################################
2091 def do_reject (self, manual=0, reject_message="", note=""):
2093 Reject an upload. If C{manual} is true and no reject message was
2094 given, spawn an editor so the user can write one.
2097 @param manual: manual or automated rejection
2099 @type reject_message: string
2100 @param reject_message: A reject message
2105 # If we weren't given a manual rejection message, spawn an
2106 # editor so the user can add one in...
2107 if manual and not reject_message:
2108 (fd, temp_filename) = utils.temp_filename()
2109 temp_file = os.fdopen(fd, 'w')
2112 temp_file.write(line)
2114 editor = os.environ.get("EDITOR","vi")
2116 while answer == 'E':
2117 os.system("%s %s" % (editor, temp_filename))
2118 temp_fh = utils.open_file(temp_filename)
2119 reject_message = "".join(temp_fh.readlines())
2121 print "Reject message:"
2122 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2123 prompt = "[R]eject, Edit, Abandon, Quit ?"
2125 while prompt.find(answer) == -1:
2126 answer = utils.our_raw_input(prompt)
2127 m = re_default_answer.search(prompt)
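# (re_default_answer presumably pulls the bracketed default, "R", out
# of the prompt so an empty answer selects it.)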
2130 answer = answer[:1].upper()
2131 os.unlink(temp_filename)
2137 print "Rejecting.\n"
2141 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2142 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2144 # Move all the files into the reject directory
2145 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2146 self.force_reject(reject_files)
2148 # If we fail here someone is probably trying to exploit the race
2149 # so let's just raise an exception ...
2150 if os.path.exists(reason_filename):
2151 os.unlink(reason_filename)
2152 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2154 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2158 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2159 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2160 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2161 os.write(reason_fd, reject_message)
2162 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2164 # Build up the rejection email
2165 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2166 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2167 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2168 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2169 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2170 # Write the rejection email out as the <foo>.reason file
2171 os.write(reason_fd, reject_mail_message)
2173 del self.Subst["__REJECTOR_ADDRESS__"]
2174 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2175 del self.Subst["__CC__"]
2179 # Send the rejection mail if appropriate
2180 if not cnf["Dinstall::Options::No-Mail"]:
2181 utils.send_mail(reject_mail_message)
2184 self.logger.log(["rejected", self.pkg.changes_file])
2188 ################################################################################
2189 def in_override_p(self, package, component, suite, binary_type, filename, session):
2191 Check if a package already has override entries in the DB
2193 @type package: string
2194 @param package: package name
2196 @type component: string
2197 @param component: component name
2200 @param suite: suite name
2202 @type binary_type: string
2203 @param binary_type: type of the package
2205 @type filename: string
2206 @param filename: filename we check
2208 @return: the database result. But no one cares anyway.
2214 if binary_type == "": # must be source
2217 file_type = binary_type
2219 # Override suite name; used for example with proposed-updates
2220 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2221 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2223 result = get_override(package, suite, component, file_type, session)
2225 # If checking for a source package fall back on the binary override type
2226 if file_type == "dsc" and len(result) < 1:
2227 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2229 # Remember the section and priority so we can check them later if appropriate
2232 self.pkg.files[filename]["override section"] = result.section.section
2233 self.pkg.files[filename]["override priority"] = result.priority.priority
2238 ################################################################################
2239 def get_anyversion(self, sv_list, suite):
2242 @param sv_list: list of (suite, version) tuples to check
2245 @param suite: suite name
2251 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
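# Consider the suite itself plus everything it Enhances, and keep the
# highest version seen across them.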
2252 for (s, v) in sv_list:
2253 if s in [ x.lower() for x in anysuite ]:
2254 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2259 ################################################################################
2261 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2264 @param sv_list: list of (suite, version) tuples to check
2266 @type filename: string
2267 @param filename: name of the file being checked (used in reject messages)
2269 @type new_version: string
2270 @param new_version: version of the package being checked
2272 Ensure versions are newer than existing packages in target
2273 suites and that cross-suite version checking rules as
2274 set out in the conf file are satisfied.
2279 # Check versions for each target suite
2280 for target_suite in self.pkg.changes["distribution"].keys():
2281 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2282 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2284 # Enforce "must be newer than target suite" even if conffile omits it
2285 if target_suite not in must_be_newer_than:
2286 must_be_newer_than.append(target_suite)
2288 for (suite, existent_version) in sv_list:
2289 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
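# vercmp > 0: new version is newer; == 0: equal; < 0: older.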
2291 if suite in must_be_newer_than and sourceful and vercmp < 1:
2292 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2294 if suite in must_be_older_than and vercmp > -1:
2297 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2298 # we really use the other suite, ignoring the conflicting one ...
2299 addsuite = self.pkg.changes["distribution-version"][suite]
2301 add_version = self.get_anyversion(sv_list, addsuite)
2302 target_version = self.get_anyversion(sv_list, target_suite)
2305 # not add_version can only happen if we map to a suite
2306 # that doesn't enhance the suite we're propup'ing from.
2307 # so "propup-ver x a b c; map a d" is a problem only if
2308 # d doesn't enhance a.
2310 # i think we could always propagate in this case, rather
2311 # than complaining. either way, this isn't a REJECT issue
2313 # And - we really should complain to the dorks who configured dak
2314 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2315 self.pkg.changes.setdefault("propdistribution", {})
2316 self.pkg.changes["propdistribution"][addsuite] = 1
2318 elif not target_version:
2319 # not target_version is true when the package is NEW
2320 # we could just stick with the "...old version..." REJECT
2321 # for this, I think.
2322 self.rejects.append("Won't propagate NEW packages.")
2323 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2324 # propagation would be redundant. no need to reject though.
2325 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2327 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2328 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2330 self.warnings.append("Propagating upload to %s" % (addsuite))
2331 self.pkg.changes.setdefault("propdistribution", {})
2332 self.pkg.changes["propdistribution"][addsuite] = 1
2336 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2338 ################################################################################
2339 def check_binary_against_db(self, filename, session):
2340 # Ensure version is sane
2341 q = session.query(BinAssociation)
2342 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2343 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2345 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2346 filename, self.pkg.files[filename]["version"], sourceful=False)
2348 # Check for any existing copies of the file
2349 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2350 q = q.filter_by(version=self.pkg.files[filename]["version"])
2351 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2354 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2356 ################################################################################
2358 def check_source_against_db(self, filename, session):
2361 source = self.pkg.dsc.get("source")
2362 version = self.pkg.dsc.get("version")
2364 # Ensure version is sane
2365 q = session.query(SrcAssociation)
2366 q = q.join(DBSource).filter(DBSource.source==source)
2368 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2369 filename, version, sourceful=True)
2371 ################################################################################
2372 def check_dsc_against_db(self, filename, session):
2375 @warning: NB: this function can remove entries from the 'files' index [if
2376 the orig tarball is a duplicate of the one in the archive]; if
2377 you're iterating over 'files' and call this function as part of
2378 the loop, be sure to add a check to the top of the loop to
2379 ensure you haven't just tried to dereference the deleted entry.
2384 self.pkg.orig_files = {} # XXX: do we need to clear it?
2385 orig_files = self.pkg.orig_files
2387 # Try and find all files mentioned in the .dsc. This has
2388 # to work harder to cope with the multiple possible
2389 # locations of an .orig.tar.gz.
2390 # The ordering on the select is needed to pick the newest orig
2391 # when it exists in multiple places.
2392 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2394 if self.pkg.files.has_key(dsc_name):
2395 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2396 actual_size = int(self.pkg.files[dsc_name]["size"])
2397 found = "%s in incoming" % (dsc_name)
2399 # Check the file does not already exist in the archive
2400 ql = get_poolfile_like_name(dsc_name, session)
2402 # Strip out anything whose filename doesn't end in dsc_name
2404 if not i.filename.endswith(dsc_name):
2407 # "[dak] has not broken them. [dak] has fixed a
2408 # brokenness. Your crappy hack exploited a bug in
2411 # "(Come on! I thought it was always obvious that
2412 # one just doesn't release different files with
2413 # the same name and version.)"
2414 # -- ajk@ on d-devel@l.d.o
2417 # Ignore exact matches for .orig.tar.gz
2419 if re_is_orig_source.match(dsc_name):
2421 if self.pkg.files.has_key(dsc_name) and \
2422 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2423 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2424 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2425 # TODO: Don't delete the entry, just mark it as not needed
2426 # This would fix the stupidity of changing something we often iterate over
2427 # whilst we're doing it
2428 del self.pkg.files[dsc_name]
2429 if not orig_files.has_key(dsc_name):
2430 orig_files[dsc_name] = {}
2431 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2435 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2437 elif re_is_orig_source.match(dsc_name):
2439 ql = get_poolfile_like_name(dsc_name, session)
2441 # Strip out anything whose filename doesn't end in dsc_name
2442 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2444 if not i.filename.endswith(dsc_name):
2448 # Unfortunately, we may get more than one match here if,
2449 # for example, the package was in potato but had an -sa
2450 # upload in woody. So we need to choose the right one.
2452 # default to something sane in case we don't match any or have only one
2457 old_file = os.path.join(i.location.path, i.filename)
2458 old_file_fh = utils.open_file(old_file)
2459 actual_md5 = apt_pkg.md5sum(old_file_fh)
2461 actual_size = os.stat(old_file)[stat.ST_SIZE]
2462 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2465 old_file = os.path.join(i.location.path, i.filename)
2466 old_file_fh = utils.open_file(old_file)
2467 actual_md5 = apt_pkg.md5sum(old_file_fh)
2469 actual_size = os.stat(old_file)[stat.ST_SIZE]
2471 suite_type = x.location.archive_type
2472 # need this for updating dsc_files in install()
2473 dsc_entry["files id"] = x.file_id
2474 # See install() in process-accepted...
2475 if not orig_files.has_key(dsc_name):
2476 orig_files[dsc_name] = {}
2477 orig_files[dsc_name]["id"] = x.file_id
2478 orig_files[dsc_name]["path"] = old_file
2479 orig_files[dsc_name]["location"] = x.location.location_id
2481 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2482 # Not there? Check the queue directories...
2483 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2484 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2486 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2487 if os.path.exists(in_otherdir):
2488 in_otherdir_fh = utils.open_file(in_otherdir)
2489 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2490 in_otherdir_fh.close()
2491 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2493 if not orig_files.has_key(dsc_name):
2494 orig_files[dsc_name] = {}
2495 orig_files[dsc_name]["path"] = in_otherdir
2498 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2501 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2503 if actual_md5 != dsc_entry["md5sum"]:
2504 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2505 if actual_size != int(dsc_entry["size"]):
2506 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2508 ################################################################################
2509 # This is used by process-new and process-holding to recheck a changes file
2510 # at the time we're running. It mainly wraps various other internal functions
2511 # and is similar to accepted_checks - these should probably be tidied up
2513 def recheck(self, session):
2515 for f in self.pkg.files.keys():
2516 # The .orig.tar.gz can disappear out from under us if it's a
2517 # duplicate of one in the archive.
2518 if not self.pkg.files.has_key(f):
2521 entry = self.pkg.files[f]
2523 # Check that the source still exists
2524 if entry["type"] == "deb":
2525 source_version = entry["source version"]
2526 source_package = entry["source package"]
2527 if not self.pkg.changes["architecture"].has_key("source") \
2528 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2529 source_epochless_version = re_no_epoch.sub('', source_version)
2530 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2532 for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
2533 if cnf.has_key("Dir::Queue::%s" % (q)):
2534 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2537 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2539 # Version and file overwrite checks
2540 if entry["type"] == "deb":
2541 self.check_binary_against_db(f, session)
2542 elif entry["type"] == "dsc":
2543 self.check_source_against_db(f, session)
2544 self.check_dsc_against_db(f, session)
2546 ################################################################################
2547 def accepted_checks(self, overwrite_checks, session):
2548 # Recheck anything that relies on the database; since that's not
2549 # frozen between accept and our run time when called from p-a.
2551 # overwrite_checks is set to False when installing to stable/oldstable
2556 # Find the .dsc (again)
2558 for f in self.pkg.files.keys():
2559 if self.pkg.files[f]["type"] == "dsc":
2562 for checkfile in self.pkg.files.keys():
2563 # The .orig.tar.gz can disappear out from under us if it's a
2564 # duplicate of one in the archive.
2565 if not self.pkg.files.has_key(checkfile):
2568 entry = self.pkg.files[checkfile]
2570 # Check that the source still exists
2571 if entry["type"] == "deb":
2572 source_version = entry["source version"]
2573 source_package = entry["source package"]
2574 if not self.pkg.changes["architecture"].has_key("source") \
2575 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2576 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2578 # Version and file overwrite checks
2579 if overwrite_checks:
2580 if entry["type"] == "deb":
2581 self.check_binary_against_db(checkfile, session)
2582 elif entry["type"] == "dsc":
2583 self.check_source_against_db(checkfile, session)
2584 self.check_dsc_against_db(dsc_filename, session)
2586 # Propagate in case it is in the override tables:
2587 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2588 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2589 propogate[suite] = 1
2591 nopropogate[suite] = 1
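# A suite only gains the upload if every file had an override there,
# i.e. it never ended up in nopropogate.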
2593 for suite in propogate.keys():
2594 if suite in nopropogate:
2596 self.pkg.changes["distribution"][suite] = 1
2598 for checkfile in self.pkg.files.keys():
entry = self.pkg.files[checkfile]
2599 # Check the package is still in the override tables
2600 for suite in self.pkg.changes["distribution"].keys():
2601 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2602 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2604 ################################################################################
2605 # This is not really a reject, but an unaccept, but since a) the code for
2606 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2607 # extremely rare, for now we'll go with whining at our admin folks...
2609 def do_unaccept(self):
2613 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2614 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2615 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2616 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2617 if cnf.has_key("Dinstall::Bcc"):
2618 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2620 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2622 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2624 # Write the rejection email out as the <foo>.reason file
2625 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2626 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2628 # If we fail here someone is probably trying to exploit the race
2629 # so let's just raise an exception ...
2630 if os.path.exists(reject_filename):
2631 os.unlink(reject_filename)
2633 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2634 os.write(fd, reject_mail_message)
2637 utils.send_mail(reject_mail_message)
2639 del self.Subst["__REJECTOR_ADDRESS__"]
2640 del self.Subst["__REJECT_MESSAGE__"]
2641 del self.Subst["__CC__"]
2643 ################################################################################
2644 # If any file of an upload has a recent mtime then chances are good
2645 # the file is still being uploaded.
2647 def upload_too_new(self):
2650 # Move back to the original directory to get accurate time stamps
2652 os.chdir(self.pkg.directory)
2653 file_list = self.pkg.files.keys()
2654 file_list.extend(self.pkg.dsc_files.keys())
2655 file_list.append(self.pkg.changes_file)
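# Anything modified less than Dinstall::SkipTime seconds ago counts as
# still in flight.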
2658 last_modified = time.time()-os.path.getmtime(f)
2659 if last_modified < int(cnf["Dinstall::SkipTime"]):