Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################
import errno
import os
import re
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
import yaml

from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQLAlchemy session object

    @rtype: string
    @return: filetype
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
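
# Illustrative sketch (not part of dak): get_type() maps a files entry from a
# parsed .changes onto a file type known to the override tables.  The entry
# layout below is a hypothetical example, assumed from the docstring above.
#
#     session = DBConn().session()
#     entry = {"type": "deb"}                # hypothetical files entry
#     file_type = get_type(entry, session)   # -> "deb" (fubar()s if unknown)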
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.
    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        #if f["type"] == "byhand":
        #    continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
        if override:
            (olderr, newerr) = (get_suite(suite, session) == None,
                                get_suite(override, session) == None)
            if olderr or newerr:
                (oinv, ninv) = ("", "")
                if olderr: oinv = "invalid "
                if newerr: ninv = "invalid "
                print "warning: overriding %ssuite %s to %ssuite %s" % (
                        oinv, suite, ninv, override)
            del changes["suite"][suite]
            changes["suite"][override] = 1

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
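
# Illustrative sketch (not part of dak): the dict returned by determine_new()
# maps package names to their proposed override data, roughly:
#
#     new = determine_new(upload.pkg.changes, upload.pkg.files)
#     # new ~= {"foo": {"priority": "optional", "section": "utils",
#     #                 "type": "deb", "component": "main",
#     #                 "files": ["foo_1.0_amd64.deb"]}}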
################################################################################

def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
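
# Illustrative sketch (not part of dak): after check_valid(new), entries whose
# section or priority is unknown, or inconsistent with their type, carry an id
# of -1, which callers treat as invalid:
#
#     check_valid(new)
#     broken = [p for p in new.keys() if new[p]["section id"] == -1
#                                     or new[p]["priority id"] == -1]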
###############################################################################

def check_status(files):
    new = byhand = 0
    for f in files.keys():
        if files[f].has_key("byhand"):
            byhand = 1
        elif files[f].has_key("new"):
            new = 1
    return (new, byhand)

###############################################################################
# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
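
# Illustrative use (a sketch; see Upload.check_timestamps below for the real
# call site): the callback collects mtimes outside the allowed window while
# apt_inst walks a deb's tarballs.  Cutoff values here are hypothetical.
#
#     tar = TarTime(time.time() + 86400,                          # one day ahead
#                   time.mktime(time.strptime("1975", "%Y")))     # distant past
#     apt_inst.debExtract(utils.open_file("foo.deb"), tar.callback, "control.tar.gz")
#     print tar.future_files, tar.ancient_files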
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.
    """

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
    ###########################################################################

    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it.  Also checks
        for mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(filename)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

    ###########################################################################
    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
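
    # Illustrative sketch (hypothetical values, not from a real dak.conf): the
    # SuiteMappings entries parsed above are whitespace-separated, with the
    # mapping type first, e.g.
    #
    #     SuiteMappings {
    #       "map stable proposed-updates";
    #       "map-unreleased stable unstable";
    #       "propup-version stable-security testing testing-proposed-updates";
    #     };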
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        # package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        # version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        # architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
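
    # Illustrative sketch (hypothetical values): the ComponentMappings entries
    # handled above are whitespace-separated "source dest" pairs, e.g.
    #
    #     ComponentMappings {
    #       "non-US/main main";
    #       "non-US/contrib contrib";
    #     };
    #
    # A file uploaded with component "non-US/main" would then be mapped to
    # "main", with the original remembered in entry["original component"].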
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################

    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True

    ###########################################################################
    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
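
    # Illustrative sketch (assumed shape, not verified against utils): the
    # known_hashes table iterated in ensure_hashes() pairs a checksum field
    # name with a hash function and the changes format that introduced it,
    # roughly:
    #
    #     known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
    #                     ("sha256", apt_pkg.sha256sum, (1, 8))]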
    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
    ###########################################################################

    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        # Run lintian on the .changes file, capturing output to then parse it.
        cmd = "lintian --show-overrides --tags-from-file %s %s" % \
            (temp_filename, self.pkg.changes_file)

        result, output = commands.getstatusoutput(cmd)

        # Remove our tempfile and any symlinks we created
        os.unlink(temp_filename)

        for symlink in symlinked:
            os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
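
    # Illustrative sketch (hypothetical values): the Dinstall::LintianTags
    # file read above is YAML with a top-level "lintian" key mapping tag
    # categories to tag lists, e.g.
    #
    #     lintian:
    #       fatal:
    #         - binary-in-etc
    #       nonfatal:
    #         - statically-linked-binary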
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source") and \
               fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)
            # If not a DM, we allow full upload rights
            uid_email = "%s@debian.org" % (fpr.uid.uid)
            self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
    def check_dm_upload(self, fpr, session):
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
    ###########################################################################
    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existent) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, as there is a wrapper to edit the file
            # which checks it, but we would rather be safe than end up
            # rejecting everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
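
    # Illustrative sketch (hypothetical values): a transitions YAML entry,
    # matching the keys read above (source, new, rm, reason, packages):
    #
    #     libfoo_transition:
    #       source: libfoo
    #       new: 1.2-1
    #       rm: Some Releaser
    #       reason: "libfoo soname bump"
    #       packages:
    #         - libfoo
    #         - bar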
    ###########################################################################
    # End check_signed_by_key checks
    ###########################################################################

    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
    ###########################################################################

    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: If set to false, no real action will be taken.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.
        """

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                self.update_subst()
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        summary += "\n"

        return summary
1756 ###########################################################################
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: if False, no real action will be taken

        @rtype: string
        @return: Textstring about action taken.
        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary
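        # The per-suite announce address comes from the dak configuration.
        # A hedged sketch of the expected stanza (apt-style conf syntax,
        # address is illustrative):
        #
        #   Suite::unstable::Announce "debian-devel-changes@lists.debian.org";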
        for dist in self.pkg.changes["distribution"].keys():
            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.update_subst()
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary

    ###########################################################################
    def accept (self, summary, short_summary, session=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the pool,
        sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        cnf = Config()
        stats = SummaryStats()

        print "Installing."
        self.logger.log(["installing changes", self.pkg.changes_file])

        poolfiles = []

        # Add the .dsc file to the DB first
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
                for j in pfs:
                    poolfiles.append(j)

        # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
        for newfile, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                poolfiles.append(add_deb_to_db(self, newfile, session))
        # If this is a sourceful, diff-only upload that is moving
        # cross-component we need to copy the .orig files into the new
        # component too for the same reasons as above.
        # XXX: mhy: I think this should be in add_dsc_to_db
        if self.pkg.changes["architecture"].has_key("source"):
            for orig_file in self.pkg.orig_files.keys():
                if not self.pkg.orig_files[orig_file].has_key("id"):
                    continue # Skip if it's not in the pool
                orig_file_id = self.pkg.orig_files[orig_file]["id"]
                if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
                    continue # Skip if the location didn't change

                # Do the move
                oldf = get_poolfile_by_id(orig_file_id, session)
                old_filename = os.path.join(oldf.location.path, oldf.filename)
                old_dat = {'size': oldf.filesize,   'md5sum': oldf.md5sum,
                           'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}

                new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
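                # A hedged sketch of what poolify is assumed to return here
                # (names illustrative): poolify("libfoo", "main") would give
                # "main/libf/libfoo/", which is joined with Dir::Pool below
                # to form the full destination path.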
                # TODO: Care about size/md5sum collisions etc
                (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)

                # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
                if newf is None:
                    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
                    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
                    session.flush()

                    # Don't reference the old file from this changes
                    for p in poolfiles:
                        if p.file_id == oldf.file_id:
                            poolfiles.remove(p)
                    poolfiles.append(newf)

                    # Fix up the DSC references
                    toremove = []
                    for df in source.srcfiles:
                        if df.poolfile.file_id == oldf.file_id:
                            # Add a new DSC entry and mark the old one for deletion
                            # Don't do it in the loop so we don't change the thing we're iterating over
                            newdscf = DSCFile()
                            newdscf.source_id = source.source_id
                            newdscf.poolfile_id = newf.file_id
                            session.add(newdscf)
                            toremove.append(df)

                    for df in toremove:
                        session.delete(df)

                    # Flush our changes
                    session.flush()

                    # Make sure that our source object is up-to-date
                    session.expire(source)
        # Install the files into the pool
        for newfile, entry in self.pkg.files.items():
            destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
            utils.move(newfile, destination)
            self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
            stats.accept_bytes += float(entry["size"])

        # Copy the .changes file across for suites which need it.
        copy_changes = {}
        for suite_name in self.pkg.changes["distribution"].keys():
            if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
                copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""

        for dest in copy_changes.keys():
            utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))

        # We're done - commit the database changes
        session.commit()
        # Our SQL session will automatically start a new transaction after
        # the last commit

        # Move the .changes into the 'done' directory
        utils.move(self.pkg.changes_file,
                   os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))

        if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
            UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
        self.update_subst()
        self.Subst["__SUITE__"] = ""
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst,
                                           os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
        utils.send_mail(mail_message)
        self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            if self.pkg.changes["architecture"].has_key("source"):
                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
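            # One mapping line as assembled below, for illustration
            # (package version architecture source-package source-version):
            #
            #   foo-bin 1.2-1 amd64 foo 1.2-1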
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)
        session.commit()

        # Set up our copy queues (e.g. buildd queues)
        for suite_name in self.pkg.changes["distribution"].keys():
            suite = get_suite(suite_name, session)
            for q in suite.copy_queues:
                for f in poolfiles:
                    q.add_file_from_pool(f)

        session.commit()

        stats.accept_count += 1
    def check_override(self):
        """
        Checks override entries for validity. Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
          - override disparity checks are disabled
          - mail sending is disabled
        """

        cnf = Config()

        # Abandon the check if override disparity checks have been disabled
        if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
            return

        summary = self.pkg.check_override()
        if summary == "":
            return

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        self.update_subst()
        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]

    ###########################################################################
    def remove(self, from_dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked.

        Also removes the package from holding area.
        """
        if from_dir is None:
            from_dir = self.pkg.directory
        h = Holding()

        for f in self.pkg.files.keys():
            os.unlink(os.path.join(from_dir, f))
            if os.path.exists(os.path.join(h.holding_dir, f)):
                os.unlink(os.path.join(h.holding_dir, f))

        os.unlink(os.path.join(from_dir, self.pkg.changes_file))
        if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
            os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))

    ###########################################################################
    def move_to_queue (self, queue):
        """
        Move files to a destination queue using the permissions in the table.
        """
        h = Holding()
        utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
                   queue.path, perms=int(queue.change_perms, 8))
        for f in self.pkg.files.keys():
            utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))

    ###########################################################################
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: list
        @param reject_files: names of the files to reject
        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's find a new name by adding a number
                if e.errno == errno.EEXIST:
                    try:
                        dest_file = utils.find_next_free(dest_file, 255)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
                        return

                    # Make sure we really got it
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)

    ###########################################################################
    def do_reject (self, manual=0, reject_message="", notes=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @return: 0
        """

        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(notes) > 0:
                for note in notes:
                    temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
                                    % (note.author, note.version, note.notedate, note.comment))
            temp_file.close()
            editor = os.environ.get("EDITOR", "vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message, "  ", include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        self.update_subst()
        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail
        utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0

    ################################################################################
    def in_override_p(self, package, component, suite, binary_type, filename, session):
        """
        Check if a package already has override entries in the DB.

        @type package: string
        @param package: package name

        @type component: string
        @param component: database id of the component

        @type suite: int
        @param suite: database id of the suite

        @type binary_type: string
        @param binary_type: type of the package

        @type filename: string
        @param filename: filename we check

        @return: the database result. But no one cares anyway.
        """

        cnf = Config()

        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = cnf["Suite::%s::OverrideSuite" % (suite)]
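        # A hedged sketch of the conf stanza consulted above (suite name
        # and value are illustrative):
        #
        #   Suite::proposed-updates::OverrideSuite "stable";
        #
        # i.e. overrides for proposed-updates uploads are looked up in stable.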
        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[filename]["override section"] = result.section.section
            self.pkg.files[filename]["override priority"] = result.priority.priority

        return result

    ################################################################################
    def get_anyversion(self, sv_list, suite):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check
        @type suite: string
        @param suite: suite name
        @return: the highest version found in sv_list for C{suite} or any
                 suite that enhances it, or None if there is none
        """
        Cnf = Config()
        anyversion = None
        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
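        # A hedged sketch of the conf this reads (suite names are
        # illustrative, syntax assumed apt-style): with
        #
        #   Suite::testing::VersionChecks::Enhances { "unstable"; };
        #
        # anysuite for "testing" would be ["testing", "unstable"].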
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v

        return anyversion

    ################################################################################
    def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type filename: string
        @param filename: name of the file whose version is being checked

        @type new_version: string
        @param new_version: version of the incoming upload

        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.
        """

        cnf = Config()
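        # A hedged sketch of the conf stanzas driving these checks (suite
        # names are illustrative, syntax assumed apt-style):
        #
        #   Suite::unstable::VersionChecks::MustBeNewerThan { "stable"; "testing"; };
        #   Suite::testing::VersionChecks::MustBeOlderThan { "experimental"; };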
        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)
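                # apt_pkg.VersionCompare(a, b) follows the cmp() convention:
                # negative if a sorts before b, zero if equal, positive if a
                # sorts after b (Debian version ordering).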
                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))

    ################################################################################
    def check_binary_against_db(self, filename, session):
        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))

        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       filename, self.pkg.files[filename]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
        q = q.filter_by(version=self.pkg.files[filename]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)

    ################################################################################
    def check_source_against_db(self, filename, session):
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        q = session.query(SrcAssociation)
        q = q.join(DBSource).filter(DBSource.source==source)

        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                       filename, version, sourceful=True)

    ################################################################################
    def check_dsc_against_db(self, filename, session):
        """
        @warning: NB: this function can remove entries from the 'files' index [if
        the orig tarball is a duplicate of the one in the archive]; if
        you're iterating over 'files' and call this function as part of
        the loop, be sure to add a check to the top of the loop to
        ensure you haven't just tried to dereference the deleted entry.
        """

        Cnf = Config()
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            found = None
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                for i in ql:
                    if not i.filename.endswith(dsc_name):
                        ql.remove(i)

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall.
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if len(ql) > 0:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if re_is_orig_source.match(dsc_name):
                        for i in ql:
                            if self.pkg.files.has_key(dsc_name) and \
                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                                # TODO: Don't delete the entry, just mark it as not needed
                                # This would fix the stupidity of changing something we often iterate over
                                # whilst we're doing it
                                del self.pkg.files[dsc_name]
                                dsc_entry["files id"] = i.file_id
                                if not orig_files.has_key(dsc_name):
                                    orig_files[dsc_name] = {}
                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                match = 1

                                # Don't bitch that we couldn't find this file later
                                try:
                                    self.later_check_files.remove(dsc_name)
                                except ValueError:
                                    pass

                    if not match:
                        self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
            elif re_is_orig_source.match(dsc_name):
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                for i in ql:
                    if not i.filename.endswith(dsc_name):
                        ql.remove(i)

                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id
                else:
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                            continue
                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
                continue

            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, filename))
    ################################################################################
    # This is used by process-new and process-holding to recheck a changes file
    # at the time we're running.  It mainly wraps various other internal functions
    # and is similar to accepted_checks - these should probably be tidied up
    # and combined.
    def recheck(self, session):
        cnf = Config()
        for f in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(f):
                continue

            entry = self.pkg.files[f]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                    source_epochless_version = re_no_epoch.sub('', source_version)
                    dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                    found = False
                    for q in ["Embargoed", "Unembargoed", "Newstage"]:
                        if cnf.has_key("Dir::Queue::%s" % (q)):
                            if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
                                found = True
                    if not found:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

            # Version and file overwrite checks
            if entry["type"] == "deb":
                self.check_binary_against_db(f, session)
            elif entry["type"] == "dsc":
                self.check_source_against_db(f, session)
                self.check_dsc_against_db(f, session)

    ################################################################################
    def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database; since that's not
        # frozen between accept and our run time when called from p-a.

        # overwrite_checks is set to False when installing to stable/oldstable

        propogate = {}
        nopropogate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # propagate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propogate[suite] = 1
                else:
                    nopropogate[suite] = 1

        for suite in propogate.keys():
            if suite in nopropogate:
                continue
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            # Check the package is still in the override tables
            entry = self.pkg.files[checkfile]
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
    ################################################################################
    # This is not really a reject, but an unaccept, but since a) the code for
    # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
    # extremely rare, for now we'll go with whining at our admin folks...

    def do_unaccept(self):
        cnf = Config()

        self.update_subst()
        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
        if cnf.has_key("Dinstall::Bcc"):
            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")

        reject_mail_message = utils.TemplateSubst(self.Subst, template)

        # Write the rejection email out as the <foo>.reason file
        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename)

        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        os.write(fd, reject_mail_message)
        os.close(fd)

        utils.send_mail(reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__REJECT_MESSAGE__"]
        del self.Subst["__CC__"]
    ################################################################################
    # If any file of an upload has a recent mtime then chances are good
    # the file is still being uploaded.

    def upload_too_new(self):
        cnf = Config()
        too_new = False
        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time()-os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):