"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################
from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

from dak_exceptions import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQLAlchemy session object
    """
    # Get the file type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
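# Hedged usage sketch (illustrative only -- the file entry below is made up
# and a configured dak database connection is assumed):
#
#   session = DBConn().session()
#   entry = {"type": "deb", "dbtype": "deb"}
#   print get_type(entry, session)      # -> "deb"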
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
#        if f["type"] == "byhand":
#            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
        if override:
            (olderr, newerr) = (get_suite(suite, session) == None,
                                get_suite(override, session) == None)
            if olderr or newerr:
                (oinv, ninv) = ("", "")
                if olderr: oinv = "invalid "
                if newerr: ninv = "invalid "
                print "warning: overriding %ssuite %s to %ssuite %s" % (
                    oinv, suite, ninv, override)
            del changes["suite"][suite]
            changes["suite"][override] = 1

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
################################################################################

def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc, the priority must be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
###############################################################################

def check_status(files):
    new = byhand = 0
    for f in files.keys():
        if files[f].has_key("byhand"):
            byhand = 1
        elif files[f].has_key("new"):
            new = 1
    return (new, byhand)

###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
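# Hedged usage sketch (mirrors check_timestamps further down; the .deb path
# is hypothetical):
#
#   import time
#   future_cutoff = time.time() + 24 * 60 * 60
#   past_cutoff = time.mktime(time.strptime("1975", "%Y"))
#   tar = TarTime(future_cutoff, past_cutoff)
#   apt_inst.debExtract(utils.open_file("foo_1.0-1_amd64.deb"),
#                       tar.callback, "data.tar.gz")
#   print tar.future_files.keys(), tar.ancient_files.keys()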
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """

    ###########################################################################
    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1
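        # e.g. (illustrative): "Architecture: source amd64" becomes
        # self.pkg.changes["architecture"] == {"source": 1, "amd64": 1}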
        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""
            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(filename)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True

    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found, but the Architecture line in the changes file mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################

    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s referenced in changes" % f)

        session.close()

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "removal of tmpdir %s failed (%s)" % (tmpdir, e)
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
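    # Hedged usage sketch: each utils.check_* helper is assumed to return a
    # list of reject strings, so a caller might do (path hypothetical):
    #
    #   u = Upload()
    #   u.load_changes("/path/to/foo_1.0-1_amd64.changes")
    #   u.check_hashes()
    #   if u.rejects:
    #       print "\n".join(u.rejects)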
    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed after the package is extracted).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            if session is None:
                session_ = DBConn().session()
            else:
                session_ = session

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
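    # Hedged usage sketch (mirrors how check_lintian uses this below):
    #
    #   symlinked = self.ensure_orig(target_dir='.')
    #   # ... run tools that need the orig tarballs ...
    #   for symlink in symlinked:
    #       os.unlink(symlink)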
    ###########################################################################

    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
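    # Hedged sketch of the Dinstall::LintianTags YAML consumed above (category
    # and tag names are hypothetical; the code only relies on a top-level
    # "lintian" key whose values are lists of tag names):
    #
    #   lintian:
    #     fatal:
    #       - binary-in-etc
    #     nonfatal:
    #       - debian-control-file-uses-obsolete-national-encoding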
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source") and \
               fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)

            # If not a DM, we allow full upload rights
            uid_email = "%s@debian.org" % (fpr.uid.uid)
            self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)

        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: " + ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
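    # Hedged sketch of an UploadBlock row as implied by the attribute access
    # above (column values are hypothetical): source='foo', version=None,
    # fpr=<Fingerprint>, uid=None, reason='pending security embargo' -- which
    # would reject every version of foo signed with that fingerprint.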
    def check_dm_upload(self, fpr, session):
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
    ###########################################################################
    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existent) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we would rather be safe than end up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
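    # Hedged sketch of one entry in the ReleaseTransitions YAML read above,
    # inferred from the keys used (names and versions are hypothetical):
    #
    #   apt_transition:
    #     reason: "apt 0.8 needs to migrate together with its rdeps"
    #     source: apt
    #     new: 0.8.0
    #     rm: "Some Release Team Member"
    #     packages:
    #       - apt
    #       - python-apt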
    ###########################################################################
    # End check_signed_by_key checks
    ###########################################################################

    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
1695 ###########################################################################
1697 def close_bugs(self, summary, action):
1699 Send mail to close bugs as instructed by the closes field in the changes file.
1700 Also add a line to summary if any work was done.
1702 @type summary: string
1703 @param summary: summary text, as given by L{build_summaries}
1706 @param action: Set to false no real action will be done.
1709 @return: summary. If action was taken, extended by the list of closed bugs.
1713 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1715 bugs = self.pkg.changes["closes"].keys()
1721 summary += "Closing bugs: "
1723 summary += "%s " % (bug)
1726 self.Subst["__BUG_NUMBER__"] = bug
1727 if self.pkg.changes["distribution"].has_key("stable"):
1728 self.Subst["__STABLE_WARNING__"] = """
1729 Note that this package is not part of the released stable Debian
1730 distribution. It may have dependencies on other unreleased software,
1731 or other instabilities. Please take care if you wish to install it.
1732 The update will eventually make its way into the next released Debian
1733 distribution."""
1734 else:
1735     self.Subst["__STABLE_WARNING__"] = ""
1736 mail_message = utils.TemplateSubst(self.Subst, template)
1737 utils.send_mail(mail_message)
1739 # Clear up after ourselves
1740 del self.Subst["__BUG_NUMBER__"]
1741 del self.Subst["__STABLE_WARNING__"]
1743 if action and self.logger:
1744 self.logger.log(["closing bugs"] + bugs)
1746 summary += "\n"
1748 return summary
1750 ###########################################################################
1752 def announce(self, short_summary, action):
1753 """
1754 Send an announce mail about a new upload.
1756 @type short_summary: string
1757 @param short_summary: Short summary text to include in the mail
1759 @type action: bool
1760 @param action: if False, no real action will be taken
1762 @rtype: string
1763 @return: Textstring about action taken.
1765 """
1767 cnf = Config()
1768 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1770 # Only do announcements for source uploads with a recent dpkg-dev installed
1771 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1772    self.pkg.changes["architecture"].has_key("source"):
1773     return ""
1775 lists_done = {}
1776 summary = ""
1778 self.Subst["__SHORT_SUMMARY__"] = short_summary
1780 for dist in self.pkg.changes["distribution"].keys():
1781 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1782 if announce_list == "" or lists_done.has_key(announce_list):
1783     continue
1785 lists_done[announce_list] = 1
1786 summary += "Announcing to %s\n" % (announce_list)
1788 if action:
1790 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1791 if cnf.get("Dinstall::TrackingServer") and \
1792 self.pkg.changes["architecture"].has_key("source"):
1793 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1794 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1796 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1797 utils.send_mail(mail_message)
1799 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1801 if cnf.FindB("Dinstall::CloseBugs"):
1802 summary = self.close_bugs(summary, action)
1804 del self.Subst["__SHORT_SUMMARY__"]
1806 return summary
1808 ###########################################################################
1810 def accept (self, summary, short_summary, session=None):
1811 """
1812 Accept an upload.
1814 This moves all files referenced from the .changes into the pool,
1815 sends the accepted mail, announces to lists, closes bugs and
1816 also checks for override disparities. If enabled it will write out
1817 the version history for the BTS Version Tracking and will finally
1818 set up any copy queues (e.g. for the buildds).
1820 @type summary: string
1821 @param summary: Summary text
1823 @type short_summary: string
1824 @param short_summary: Short summary
1825 """
1827 cnf = Config()
1828 stats = SummaryStats()
1831 self.logger.log(["installing changes", self.pkg.changes_file])
1833 poolfiles = []
1835 # Add the .dsc file to the DB first
1836 for newfile, entry in self.pkg.files.items():
1837     if entry["type"] == "dsc":
1838         source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1839         for j in pfs:
1840             poolfiles.append(j)
1842 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1843 for newfile, entry in self.pkg.files.items():
1844 if entry["type"] == "deb":
1845 poolfiles.append(add_deb_to_db(self, newfile, session))
1847 # If this is a sourceful diff only upload that is moving
1848 # cross-component we need to copy the .orig files into the new
1849 # component too for the same reasons as above.
1850 # XXX: mhy: I think this should be in add_dsc_to_db
1851 if self.pkg.changes["architecture"].has_key("source"):
1852 for orig_file in self.pkg.orig_files.keys():
1853 if not self.pkg.orig_files[orig_file].has_key("id"):
1854 continue # Skip if it's not in the pool
1855 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1856 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1857 continue # Skip if the location didn't change
1860 oldf = get_poolfile_by_id(orig_file_id, session)
1861 old_filename = os.path.join(oldf.location.path, oldf.filename)
1862 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1863 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1865 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1867 # TODO: Care about size/md5sum collisions etc
1868 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1870 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
1871 if newf is None:
1872     utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1873     newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1877 # Don't reference the old file from this changes
1878 for p in poolfiles:
1879     if p.file_id == oldf.file_id:
1880         poolfiles.remove(p)
1882 poolfiles.append(newf)
1884 # Fix up the DSC references
1885 toremove = []
1887 for df in source.srcfiles:
1888     if df.poolfile.file_id == oldf.file_id:
1889         # Add a new DSC entry and mark the old one for deletion
1890         # Don't do it in the loop so we don't change the thing we're iterating over
1891         newdscf = DSCFile()
1892         newdscf.source_id = source.source_id
1893         newdscf.poolfile_id = newf.file_id
1894         session.add(newdscf)
1896         toremove.append(df)
1898 for df in toremove:
1899     session.delete(df)
1901 # Flush our changes
1902 session.flush()
1904 # Make sure that our source object is up-to-date
1905 session.expire(source)
1907 # Install the files into the pool
1908 for newfile, entry in self.pkg.files.items():
1909 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1910 utils.move(newfile, destination)
1911 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1912 stats.accept_bytes += float(entry["size"])
1914 # Copy the .changes file across for suites which need it.
1915 copy_changes = {}
1916 for suite_name in self.pkg.changes["distribution"].keys():
1917     if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1918         copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1920 for dest in copy_changes.keys():
1921 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1923 # We're done - commit the database changes
1924 session.commit()
1925 # Our SQL session will automatically start a new transaction after
1926 # the last commit
1928 # Move the .changes into the 'done' directory
1929 utils.move(self.pkg.changes_file,
1930 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1932 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1933 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1936 self.Subst["__SUITE__"] = ""
1937 self.Subst["__SUMMARY__"] = summary
1938 mail_message = utils.TemplateSubst(self.Subst,
1939 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1940 utils.send_mail(mail_message)
1941 self.announce(short_summary, 1)
1943 ## Helper stuff for DebBugs Version Tracking
1944 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1945 if self.pkg.changes["architecture"].has_key("source"):
1946 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1947 version_history = os.fdopen(fd, 'w')
1948 version_history.write(self.pkg.dsc["bts changelog"])
1949 version_history.close()
1950 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1951 self.pkg.changes_file[:-8]+".versions")
1952 os.rename(temp_filename, filename)
1953 os.chmod(filename, 0644)
1955 # Write out the binary -> source mapping.
1956 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1957 debinfo = os.fdopen(fd, 'w')
1958 for name, entry in sorted(self.pkg.files.items()):
1959 if entry["type"] == "deb":
1960 line = " ".join([entry["package"], entry["version"],
1961 entry["architecture"], entry["source package"],
1962 entry["source version"]])
1963 debinfo.write(line+"\n")
1965 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1966 self.pkg.changes_file[:-8]+".debinfo")
1967 os.rename(temp_filename, filename)
1968 os.chmod(filename, 0644)
1972 # Set up our copy queues (e.g. buildd queues)
1973 for suite_name in self.pkg.changes["distribution"].keys():
1974 suite = get_suite(suite_name, session)
1975 for q in suite.copy_queues:
1976     for f in poolfiles:
1977         q.add_file_from_pool(f)
1979 session.commit()
1981 # Finally...
1982 stats.accept_count += 1
1984 def check_override(self):
1985     """
1986     Checks override entries for validity. Mails "Override disparity" warnings,
1987     if that feature is enabled.
1989     Abandons the check if
1990       - override disparity checks are disabled
1991       - mail sending is disabled
1992     """
1994     cnf = Config()
1996     # Abandon the check if override disparity checks have been disabled
1997     if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
1998         return
2000 summary = self.pkg.check_override()
2002 if summary == "":
2003     return
2005 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2008 self.Subst["__SUMMARY__"] = summary
2009 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2010 utils.send_mail(mail_message)
2011 del self.Subst["__SUMMARY__"]
2013 ###########################################################################
2015 def remove(self, from_dir=None):
2016     """
2017     Used (for instance) in p-u to remove the package from unchecked.
2019     Also removes the package from the holding area.
2020     """
2021     if from_dir is None:
2022         from_dir = self.pkg.directory
2023     h = Holding()
2025     for f in self.pkg.files.keys():
2026 os.unlink(os.path.join(from_dir, f))
2027 if os.path.exists(os.path.join(h.holding_dir, f)):
2028 os.unlink(os.path.join(h.holding_dir, f))
2030 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2031 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2032 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2034 ###########################################################################
2036 def move_to_queue (self, queue):
2037     """
2038     Move files to a destination queue using the permissions in the table.
2039     """
2040     h = Holding()
2041     utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2042 queue.path, perms=int(queue.change_perms, 8))
2043 for f in self.pkg.files.keys():
2044 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2046 ###########################################################################
2048 def force_reject(self, reject_files):
2049     """
2050     Forcefully move files from the current directory to the
2051     reject directory. If any file already exists in the reject
2052     directory it will be moved to the morgue to make way for
2053     the new file.
2055     @type reject_files: list
2056     @param reject_files: names of the files to move
2057     """
2059     cnf = Config()
2062     for file_entry in reject_files:
2063         # Skip any files which don't exist or which we don't have permission to copy.
2064         if not os.access(file_entry, os.R_OK):
2065             continue
2067 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2069 try:
2070     dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2071 except OSError, e:
2072     # File exists? Let's find a new name by adding a number
2073     if e.errno == errno.EEXIST:
2074         try:
2075             dest_file = utils.find_next_free(dest_file, 255)
2076 except NoFreeFilenameError:
2077 # Something's either gone badly Pete Tong, or
2078 # someone is trying to exploit us.
2079 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2080 return
2082 # Make sure we really got it
2083 try:
2084     dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2085 except OSError:
2086     # Likewise
2087     utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2088     return
2089 else:
2090     raise   # not EEXIST, so some other error: re-raise it
2091 # If we got here, we own the destination file, so we can
2092 # safely overwrite it.
2093 utils.move(file_entry, dest_file, 1, perms=0660)
2094 os.close(dest_fd)
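# A minimal standalone sketch of the claim-then-move idiom used above
# (editorial illustration, not dak code): os.O_EXCL makes open() fail if
# the file already exists, so whichever process creates it first owns it
# and may safely overwrite it afterwards.
#
#   import errno, os
#
#   def claim(path):
#       try:
#           os.close(os.open(path, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644))
#           return True    # we created it; it's ours
#       except OSError, e:
#           if e.errno == errno.EEXIST:
#               return False   # someone else got there first
#           raise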
2096 ###########################################################################
2097 def do_reject (self, manual=0, reject_message="", notes=""):
2098     """
2099     Reject an upload. If called without a reject message or C{manual} is
2100     true, spawn an editor so the user can write one.
2102     @type manual: bool
2103     @param manual: manual or automated rejection
2105     @type reject_message: string
2106     @param reject_message: A reject message
2107     """
2111 # If we weren't given a manual rejection message, spawn an
2112 # editor so the user can add one in...
2113 if manual and not reject_message:
2114 (fd, temp_filename) = utils.temp_filename()
2115 temp_file = os.fdopen(fd, 'w')
2116 for note in notes:
2117     temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2118                     % (note.author, note.version, note.notedate, note.comment))
2120 temp_file.close()
2121 editor = os.environ.get("EDITOR", "vi")
2122 answer = 'E'
2123 while answer == 'E':
2124 os.system("%s %s" % (editor, temp_filename))
2125 temp_fh = utils.open_file(temp_filename)
2126 reject_message = "".join(temp_fh.readlines())
2127 temp_fh.close()
2128 print "Reject message:"
2129 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2130 prompt = "[R]eject, Edit, Abandon, Quit ?"
2131 answer = "XXX"
2132 while prompt.find(answer) == -1:
2133 answer = utils.our_raw_input(prompt)
2134 m = re_default_answer.search(prompt)
2135 if answer == "":
2136     answer = m.group(1)
2137 answer = answer[:1].upper()
2138 os.unlink(temp_filename)
2139 if answer == 'A':
2140     return 1
2141 elif answer == 'Q':
2142     sys.exit(0)
2144 print "Rejecting.\n"
2146 cnf = Config()
2148 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2149 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2151 # Move all the files into the reject directory
2152 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2153 self.force_reject(reject_files)
2155 # If we fail here someone is probably trying to exploit the race
2156 # so let's just raise an exception ...
2157 if os.path.exists(reason_filename):
2158 os.unlink(reason_filename)
2159 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2161 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2163 if not manual:
2165     self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2166 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2167 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2168 os.write(reason_fd, reject_message)
2169     reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2170 else:
2171     # Build up the rejection email
2172 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2173 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2174 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2175 self.Subst["__REJECT_MESSAGE__"] = ""
2176 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2177 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2178 # Write the rejection email out as the <foo>.reason file
2179     os.write(reason_fd, reject_mail_message)
2180 os.close(reason_fd)
2181 del self.Subst["__REJECTOR_ADDRESS__"]
2182 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2183 del self.Subst["__CC__"]
2187 # Send the rejection mail
2188 utils.send_mail(reject_mail_message)
2190 if self.logger:
2191     self.logger.log(["rejected", self.pkg.changes_file])
2193 return 0
2195 ################################################################################
2196 def in_override_p(self, package, component, suite, binary_type, filename, session):
2197     """
2198     Check if a package already has override entries in the DB.
2200     @type package: string
2201     @param package: package name
2203     @type component: string
2204     @param component: component name
2206     @type suite: string
2207     @param suite: suite name
2209 @type binary_type: string
2210 @param binary_type: type of the package
2212 @type filename: string
2213 @param filename: filename we check
2215 @return: the database result. But no one cares anyway.
2216 """
2218 cnf = Config()
2221 if binary_type == "": # must be source
2222     file_type = "dsc"
2223 else:
2224     file_type = binary_type
2226 # Override suite name; used for example with proposed-updates
2227 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2228 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2230 result = get_override(package, suite, component, file_type, session)
2232 # If checking for a source package fall back on the binary override type
2233 if file_type == "dsc" and len(result) < 1:
2234 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2236 # Remember the section and priority so we can check them later if appropriate
2237 if len(result) > 0:
2238     result = result[0]
2239     self.pkg.files[filename]["override section"] = result.section.section
2240     self.pkg.files[filename]["override priority"] = result.priority.priority
2242 return result
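# Editorial usage sketch (hypothetical values): looking up the override for
# a binary package targeted at unstable/main would go roughly like
#
#   result = self.in_override_p("dak", "main", "unstable", "deb",
#                               "dak_1.0-8_amd64.deb", session)
#
# with an empty result meaning the package is NEW for that suite.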
2245 ################################################################################
2246 def get_anyversion(self, sv_list, suite):
2247     """
2248     @type sv_list: list
2249     @param sv_list: list of (suite, version) tuples to check
2251     @type suite: string
2252     @param suite: suite name
2253     """
2255     Cnf = Config()
2256     anyversion = None
2258     anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2259 for (s, v) in sv_list:
2260 if s in [ x.lower() for x in anysuite ]:
2261 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2262     anyversion = v
2264 return anyversion
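# For reference (standard Debian version semantics, illustrated by the
# editor): apt_pkg.VersionCompare(a, b) is negative if a is older than b,
# zero if equal and positive if newer, honouring epochs, revisions and "~":
#
#   apt_pkg.VersionCompare("1.0-1", "1.0-2")    # < 0: 1.0-1 is older
#   apt_pkg.VersionCompare("1:0.9", "1.0")      # > 0: the epoch wins
#   apt_pkg.VersionCompare("1.0~rc1", "1.0")    # < 0: "~" sorts before anything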
2266 ################################################################################
2268 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2269     """
2270     @type sv_list: list
2271     @param sv_list: list of (suite, version) tuples to check
2273     @type filename: string
2274     @param filename: name of the file being checked; used in reject messages
2276     @type new_version: string
2277     @param new_version: version of the uploaded package
2279     Ensure versions are newer than existing packages in target
2280     suites and that cross-suite version checking rules as
2281     set out in the conf file are satisfied.
2282     """
2284     cnf = Config()
2286 # Check versions for each target suite
2287 for target_suite in self.pkg.changes["distribution"].keys():
2288 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2289 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2291 # Enforce "must be newer than target suite" even if conffile omits it
2292 if target_suite not in must_be_newer_than:
2293 must_be_newer_than.append(target_suite)
2295 for (suite, existent_version) in sv_list:
2296 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2298 if suite in must_be_newer_than and sourceful and vercmp < 1:
2299 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2301 if suite in must_be_older_than and vercmp > -1:
2302     cansave = 0
2304 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2305 # we really use the other suite, ignoring the conflicting one ...
2306 addsuite = self.pkg.changes["distribution-version"][suite]
2308 add_version = self.get_anyversion(sv_list, addsuite)
2309 target_version = self.get_anyversion(sv_list, target_suite)
2311 if not add_version:
2312     # not add_version can only happen if we map to a suite
2313     # that doesn't enhance the suite we're propup'ing from.
2314     # so "propup-ver x a b c; map a d" is a problem only if
2315     # d doesn't enhance a.
2316     #
2317     # i think we could always propagate in this case, rather
2318     # than complaining. either way, this isn't a REJECT issue
2319     #
2320     # And - we really should complain to the dorks who configured dak
2321     self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2322     self.pkg.changes.setdefault("propdistribution", {})
2323     self.pkg.changes["propdistribution"][addsuite] = 1
2324     cansave = 1
2325 elif not target_version:
2326     # not target_version is true when the package is NEW
2327     # we could just stick with the "...old version..." REJECT
2328     # for this, I think.
2329     self.rejects.append("Won't propagate NEW packages.")
2330 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2331     # propagation would be redundant. no need to reject though.
2332     self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2333     cansave = 1
2334 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2335      apt_pkg.VersionCompare(add_version, target_version) >= 0:
2336     # propagate!
2337     self.warnings.append("Propagating upload to %s" % (addsuite))
2338     self.pkg.changes.setdefault("propdistribution", {})
2339     self.pkg.changes["propdistribution"][addsuite] = 1
2340     cansave = 1
2342 if not cansave:
2343     self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
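# Editorial illustration of the configuration driving this check
# (hypothetical suite names; the syntax is assumed to follow dak's
# apt-style conf file, keyed as read above):
#
#   Suite::unstable::VersionChecks::MustBeNewerThan { "stable"; "testing"; };
#   Suite::proposed-updates::VersionChecks::MustBeOlderThan { "unstable"; };
#
# Under those rules an upload of foo 1.0-1 to unstable is rejected while
# stable or testing still carries foo at version 1.0-1 or higher.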
2345 ################################################################################
2346 def check_binary_against_db(self, filename, session):
2347 # Ensure version is sane
2348 q = session.query(BinAssociation)
2349 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2350 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2352 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2353 filename, self.pkg.files[filename]["version"], sourceful=False)
2355 # Check for any existing copies of the file
2356 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2357 q = q.filter_by(version=self.pkg.files[filename]["version"])
2358 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2360 if q.count() > 0:
2361     self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2363 ################################################################################
2365 def check_source_against_db(self, filename, session):
2366 source = self.pkg.dsc.get("source")
2367 version = self.pkg.dsc.get("version")
2369 # Ensure version is sane
2370 q = session.query(SrcAssociation)
2371 q = q.join(DBSource).filter(DBSource.source==source)
2373 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2374 filename, version, sourceful=True)
2376 ################################################################################
2377 def check_dsc_against_db(self, filename, session):
2378     """
2380     @warning: NB: this function can remove entries from the 'files' index [if
2381     the orig tarball is a duplicate of the one in the archive]; if
2382     you're iterating over 'files' and call this function as part of
2383     the loop, be sure to add a check to the top of the loop to
2384     ensure you haven't just tried to dereference the deleted entry.
2385     """
2387     Cnf = Config()
2389     self.pkg.orig_files = {} # XXX: do we need to clear it?
2390 orig_files = self.pkg.orig_files
2392 # Try and find all files mentioned in the .dsc. This has
2393 # to work harder to cope with the multiple possible
2394 # locations of an .orig.tar.gz.
2395 # The ordering on the select is needed to pick the newest orig
2396 # when it exists in multiple places.
2397 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2398     found = None
2399     if self.pkg.files.has_key(dsc_name):
2400 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2401 actual_size = int(self.pkg.files[dsc_name]["size"])
2402 found = "%s in incoming" % (dsc_name)
2404 # Check the file does not already exist in the archive
2405 ql = get_poolfile_like_name(dsc_name, session)
2407 # Strip out anything that isn't '%s' or '/%s$'
2408 # (filter into a new list so we don't mutate ql while iterating over it)
2409 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2412 # "[dak] has not broken them. [dak] has fixed a
2413 # brokenness. Your crappy hack exploited a bug in
2414 # the old dak.
2415 #
2416 # "(Come on! I thought it was always obvious that
2417 # one just doesn't release different files with
2418 # the same name and version.)"
2419 #                        -- ajk@ on d-devel@l.d.o
2422 # Ignore exact matches for .orig.tar.gz
2423 match = 0
2424 if re_is_orig_source.match(dsc_name):
2425     for i in ql:
2426         if self.pkg.files.has_key(dsc_name) and \
2427 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2428 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2429 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2430 # TODO: Don't delete the entry, just mark it as not needed
2431 # This would fix the stupidity of changing something we often iterate over
2432 # whilst we're doing it
2433 del self.pkg.files[dsc_name]
2434 dsc_entry["files id"] = i.file_id
2435 if not orig_files.has_key(dsc_name):
2436 orig_files[dsc_name] = {}
2437 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2438 match = 1
2440 # Don't bitch that we couldn't find this file later
2441 try:
2442     self.later_check_files.remove(dsc_name)
2443 except ValueError:
2444     pass
2446 if not match:
2447     self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2450 elif re_is_orig_source.match(dsc_name):
2452 ql = get_poolfile_like_name(dsc_name, session)
2454 # Strip out anything that isn't '%s' or '/%s$'
2455 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2457 ql = [ i for i in ql if i.filename.endswith(dsc_name) ]
2459 if len(ql) > 0:
2461 # Unfortunately, we may get more than one match here if,
2462 # for example, the package was in potato but had an -sa
2463 # upload in woody. So we need to choose the right one.
2465 # default to something sane in case we don't match any or have only one
2466 x = ql[0]
2468 if len(ql) > 1:
2469     for i in ql:
2470         old_file = os.path.join(i.location.path, i.filename)
2471         old_file_fh = utils.open_file(old_file)
2472         actual_md5 = apt_pkg.md5sum(old_file_fh)
2473         old_file_fh.close()
2474         actual_size = os.stat(old_file)[stat.ST_SIZE]
2475         if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2476             x = i
2478 old_file = os.path.join(x.location.path, x.filename)
2479 old_file_fh = utils.open_file(old_file)
2480 actual_md5 = apt_pkg.md5sum(old_file_fh)
2481 old_file_fh.close()
2482 actual_size = os.stat(old_file)[stat.ST_SIZE]
2483 found = old_file
2484 suite_type = x.location.archive_type
2485 # need this for updating dsc_files in install()
2486 dsc_entry["files id"] = x.file_id
2487 # See install() in process-accepted...
2488 if not orig_files.has_key(dsc_name):
2489 orig_files[dsc_name] = {}
2490 orig_files[dsc_name]["id"] = x.file_id
2491 orig_files[dsc_name]["path"] = old_file
2492 orig_files[dsc_name]["location"] = x.location.location_id
2493 else:
2494     # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2495     # Not there? Check the queue directories...
2496     for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2497         if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2498             continue
2499 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2500 if os.path.exists(in_otherdir):
2501 in_otherdir_fh = utils.open_file(in_otherdir)
2502 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2503 in_otherdir_fh.close()
2504 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2505 found = in_otherdir
2506 if not orig_files.has_key(dsc_name):
2507     orig_files[dsc_name] = {}
2508 orig_files[dsc_name]["path"] = in_otherdir
2510 if not found:
2511     self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2512     continue
2513 else:
2514     self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2515     continue
2516 if actual_md5 != dsc_entry["md5sum"]:
2517 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2518 if actual_size != int(dsc_entry["size"]):
2519 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2521 ################################################################################
2522 # This is used by process-new and process-holding to recheck a changes file
2523 # at the time we're running. It mainly wraps various other internal functions
2524 # and is similar to accepted_checks - these should probably be tidied up
2526 def recheck(self, session):
2527     cnf = Config()
2528     for f in self.pkg.files.keys():
2529         # The .orig.tar.gz can disappear out from under us if it's a
2530         # duplicate of one in the archive.
2531         if not self.pkg.files.has_key(f):
2532             continue
2534 entry = self.pkg.files[f]
2536 # Check that the source still exists
2537 if entry["type"] == "deb":
2538 source_version = entry["source version"]
2539 source_package = entry["source package"]
2540 if not self.pkg.changes["architecture"].has_key("source") \
2541 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2542 source_epochless_version = re_no_epoch.sub('', source_version)
2543 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2544 found = False
2545 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2546     if cnf.has_key("Dir::Queue::%s" % (q)):
2547         if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2548             found = True
2549 if not found:
2550     self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2552 # Version and file overwrite checks
2553 if entry["type"] == "deb":
2554 self.check_binary_against_db(f, session)
2555 elif entry["type"] == "dsc":
2556 self.check_source_against_db(f, session)
2557 self.check_dsc_against_db(f, session)
2559 ################################################################################
2560 def accepted_checks(self, overwrite_checks, session):
2561 # Recheck anything that relies on the database; since that's not
2562 # frozen between accept and our run time when called from p-a.
2564 # overwrite_checks is set to False when installing to stable/oldstable
2566 propagate = {}
2567 nopropagate = {}
2569 # Find the .dsc (again)
2570 dsc_filename = None
2571 for f in self.pkg.files.keys():
2572     if self.pkg.files[f]["type"] == "dsc":
2573         dsc_filename = f
2575 for checkfile in self.pkg.files.keys():
2576 # The .orig.tar.gz can disappear out from under us if it's a
2577 # duplicate of one in the archive.
2578 if not self.pkg.files.has_key(checkfile):
2579     continue
2581 entry = self.pkg.files[checkfile]
2583 # Check that the source still exists
2584 if entry["type"] == "deb":
2585 source_version = entry["source version"]
2586 source_package = entry["source package"]
2587 if not self.pkg.changes["architecture"].has_key("source") \
2588 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2589 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2591 # Version and file overwrite checks
2592 if overwrite_checks:
2593 if entry["type"] == "deb":
2594 self.check_binary_against_db(checkfile, session)
2595 elif entry["type"] == "dsc":
2596 self.check_source_against_db(checkfile, session)
2597 self.check_dsc_against_db(dsc_filename, session)
2599 # propagate in the case it is in the override tables:
2600 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2601     if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2602         propagate[suite] = 1
2603     else:
2604         nopropagate[suite] = 1
2606 for suite in propagate.keys():
2607     if suite in nopropagate:
2608         continue
2609     self.pkg.changes["distribution"][suite] = 1
2611 for checkfile in self.pkg.files.keys():
2612     entry = self.pkg.files[checkfile]  # check the package is still in the override tables
2613 for suite in self.pkg.changes["distribution"].keys():
2614 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2615 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2617 ################################################################################
2618 # This is not really a reject, but an unaccept, but since a) the code for
2619 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2620 # extremely rare, for now we'll go with whining at our admin folks...
2622 def do_unaccept(self):
2623     cnf = Config()
2626     self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2627 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2628 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2629 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2630 if cnf.has_key("Dinstall::Bcc"):
2631 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2633 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2635 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2637 # Write the rejection email out as the <foo>.reason file
2638 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2639 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2641 # If we fail here someone is probably trying to exploit the race
2642 # so let's just raise an exception ...
2643 if os.path.exists(reject_filename):
2644 os.unlink(reject_filename)
2646 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2647 os.write(fd, reject_mail_message)
2648 os.close(fd)
2650 utils.send_mail(reject_mail_message)
2652 del self.Subst["__REJECTOR_ADDRESS__"]
2653 del self.Subst["__REJECT_MESSAGE__"]
2654 del self.Subst["__CC__"]
2656 ################################################################################
2657 # If any file of an upload has a recent mtime then chances are good
2658 # the file is still being uploaded.
2660 def upload_too_new(self):
2661     cnf = Config()
2662     too_new = False
2663     # Move back to the original directory to get accurate time stamps
2664     cwd = os.getcwd()
2665     os.chdir(self.pkg.directory)
2666 file_list = self.pkg.files.keys()
2667 file_list.extend(self.pkg.dsc_files.keys())
2668 file_list.append(self.pkg.changes_file)
2669 for f in file_list:
2670     try:
2671         last_modified = time.time() - os.path.getmtime(f)
2672         if last_modified < int(cnf["Dinstall::SkipTime"]):
2673             too_new = True
2674             break
2675     except:
2676         pass
2678 os.chdir(cwd)
2679 return too_new