5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
52 from summarystats import SummaryStats
53 from utils import parse_changes, check_dsc_files
54 from textutils import fix_maintainer
55 from binary import Binary
57 ###############################################################################
59 def get_type(f, session):
61 Get the file type of C{f}
64 @param f: file entry from Changes object
66 @type session: SQLA Session
67 @param session: SQL Alchemy session object
74 if f.has_key("dbtype"):
75 file_type = f["dbtype"]
76 elif re_source_ext.match(f["type"]):
79 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (f["type"]))
81 # Validate the override type
82 type_id = get_override_type(file_type, session)
84 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
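# Illustrative sketch (not in the original source): a binary entry with an
# explicit "dbtype" keeps it, while source-style entries are matched via
# re_source_ext, e.g. (hypothetical values)
#   get_type({"dbtype": "deb"}, session)        -> "deb"
#   get_type({"type": "orig.tar.gz"}, session)  -> "dsc"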
88 ################################################################################
90 # Determine what parts in a .changes are NEW
92 def determine_new(changes, files, warn=1):
94 Determine what parts in a C{changes} file are NEW.
96 @type changes: Upload.Pkg.changes dict
97 @param changes: Changes dictionary
99 @type files: Upload.Pkg.files dict
100 @param files: Files dictionary
103 @param warn: Warn if overrides are added for (old)stable
106 @return: dictionary of NEW components.
111 session = DBConn().session()
113 # Build up a list of potentially new things
114 for name, f in files.items():
115 # Skip byhand elements
116 if f["type"] == "byhand":
119 priority = f["priority"]
120 section = f["section"]
121 file_type = get_type(f, session)
122 component = f["component"]
124 if file_type == "dsc":
127 if not new.has_key(pkg):
129 new[pkg]["priority"] = priority
130 new[pkg]["section"] = section
131 new[pkg]["type"] = file_type
132 new[pkg]["component"] = component
133 new[pkg]["files"] = []
135 old_type = new[pkg]["type"]
136 if old_type != file_type:
137 # source gets trumped by deb or udeb
138 if old_type == "dsc":
139 new[pkg]["priority"] = priority
140 new[pkg]["section"] = section
141 new[pkg]["type"] = file_type
142 new[pkg]["component"] = component
144 new[pkg]["files"].append(name)
146 if f.has_key("othercomponents"):
147 new[pkg]["othercomponents"] = f["othercomponents"]
149 for suite in changes["suite"].keys():
150 for pkg in new.keys():
151 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
153 for file_entry in new[pkg]["files"]:
154 if files[file_entry].has_key("new"):
155 del files[file_entry]["new"]
159 for s in ['stable', 'oldstable']:
160 if changes["suite"].has_key(s):
161 print "WARNING: overrides will be added for %s!" % s
162 for pkg in new.keys():
163 if new[pkg].has_key("othercomponents"):
164 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
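# Rough shape of the dictionary built above (a sketch; the actual contents
# depend on the upload), before it is returned to the caller:
#   new = { "foo": { "priority": "optional", "section": "utils",
#                    "type": "deb", "component": "main",
#                    "files": ["foo_1.0-1_i386.deb"] } }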
170 ################################################################################
172 def check_valid(new):
174 Check if section and priority for NEW packages exist in the database.
175 Additionally performs sanity checks:
176 - debian-installer packages have to be udeb (or source)
177 - non-debian-installer packages cannot be udeb
178 - source priority can only be assigned to dsc file types
181 @param new: Dict of new packages with their section, priority and type.
184 for pkg in new.keys():
185 section_name = new[pkg]["section"]
186 priority_name = new[pkg]["priority"]
187 file_type = new[pkg]["type"]
189 section = get_section(section_name)
191 new[pkg]["section id"] = -1
193 new[pkg]["section id"] = section.section_id
195 priority = get_priority(priority_name)
197 new[pkg]["priority id"] = -1
199 new[pkg]["priority id"] = priority.priority_id
202 di = section_name.find("debian-installer") != -1
204 # If d-i, we must be udeb and vice-versa
205 if (di and file_type not in ("udeb", "dsc")) or \
206 (not di and file_type == "udeb"):
207 new[pkg]["section id"] = -1
209 # If priority is source we need to be a dsc and vice-versa
210 if (priority_name == "source" and file_type != "dsc") or \
211 (priority_name != "source" and file_type == "dsc"):
212 new[pkg]["priority id"] = -1
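# Net effect of the sanity checks above (hypothetical example): a package in a
# "debian-installer" section whose type is "deb" gets "section id" forced to -1,
# and a "source" priority on anything other than a .dsc (or vice-versa) forces
# "priority id" to -1, so the entry shows up as invalid later on.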
214 ###############################################################################
216 def check_status(files):
218 for f in files.keys():
219 if files[f]["type"] == "byhand":
221 elif files[f].has_key("new"):
225 ###############################################################################
227 # Used by Upload.check_timestamps
228 class TarTime(object):
229 def __init__(self, future_cutoff, past_cutoff):
231 self.future_cutoff = future_cutoff
232 self.past_cutoff = past_cutoff
235 self.future_files = {}
236 self.ancient_files = {}
238 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
239 if MTime > self.future_cutoff:
240 self.future_files[Name] = MTime
241 if MTime < self.past_cutoff:
242 self.ancient_files[Name] = MTime
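# Usage sketch (assumed; mirrors check_timestamps below): the callback is passed
# to apt_inst.debExtract for every member of the deb's tarballs and records
# anything outside the allowed window, e.g.
#   tar = TarTime(future_cutoff, past_cutoff)
#   apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#   # tar.future_files / tar.ancient_files then map member names to mtimes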
244 ###############################################################################
246 class Upload(object):
248 Everything that has to do with processing an upload.
256 ###########################################################################
259 """ Reset a number of internal variables."""
261 # Initialize the substitution template map
264 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
265 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
266 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
267 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
275 def package_info(self):
277 Format various messages from this Upload to send to the maintainer.
281 ('Reject Reasons', self.rejects),
282 ('Warnings', self.warnings),
283 ('Notes', self.notes),
287 for title, messages in msgs:
289 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
293 ###########################################################################
294 def update_subst(self):
295 """ Set up the per-package template substitution mappings """
299 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
300 if not self.pkg.changes.has_key("architecture") or not \
301 isinstance(self.pkg.changes["architecture"], DictType):
302 self.pkg.changes["architecture"] = { "Unknown" : "" }
304 # and maintainer2047 may not exist.
305 if not self.pkg.changes.has_key("maintainer2047"):
306 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
308 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
309 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
310 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
312 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
313 if self.pkg.changes["architecture"].has_key("source") and \
314 self.pkg.changes["changedby822"] != "" and \
315 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
317 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
318 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
319 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
321 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
322 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
323 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
325 if "sponsoremail" in self.pkg.changes:
326 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
328 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
329 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
331 # Apply any global override of the Maintainer field
332 if cnf.get("Dinstall::OverrideMaintainer"):
333 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
334 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
336 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
337 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
338 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
340 ###########################################################################
341 def load_changes(self, filename):
344 @return: whether the changes file was valid or not. We may want to
345 reject even if this is True (see what gets put in self.rejects).
346 This is simply to prevent us from even trying things later which will
347 fail because we couldn't properly parse the file.
350 self.pkg.changes_file = filename
352 # Parse the .changes file into a dictionary
354 self.pkg.changes.update(parse_changes(filename))
355 except CantOpenError:
356 self.rejects.append("%s: can't read file." % (filename))
358 except ParseChangesError, line:
359 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
361 except ChangesUnicodeError:
362 self.rejects.append("%s: changes file not proper utf-8" % (filename))
365 # Parse the Files field from the .changes into another dictionary
367 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
368 except ParseChangesError, line:
369 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
371 except UnknownFormatError, format:
372 self.rejects.append("%s: unknown format '%s'." % (filename, format))
375 # Check for mandatory fields
376 for i in ("distribution", "source", "binary", "architecture",
377 "version", "maintainer", "files", "changes", "description"):
378 if not self.pkg.changes.has_key(i):
379 # Avoid undefined errors later
380 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
383 # Strip a source version in brackets from the source field
384 if re_strip_srcver.search(self.pkg.changes["source"]):
385 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
387 # Ensure the source field is a valid package name.
388 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
389 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
391 # Split multi-value fields into a lower-level dictionary
392 for i in ("architecture", "distribution", "binary", "closes"):
393 o = self.pkg.changes.get(i, "")
395 del self.pkg.changes[i]
397 self.pkg.changes[i] = {}
400 self.pkg.changes[i][j] = 1
402 # Fix the Maintainer: field to be RFC822/2047 compatible
404 (self.pkg.changes["maintainer822"],
405 self.pkg.changes["maintainer2047"],
406 self.pkg.changes["maintainername"],
407 self.pkg.changes["maintaineremail"]) = \
408 fix_maintainer (self.pkg.changes["maintainer"])
409 except ParseMaintError, msg:
410 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
411 % (filename, self.pkg.changes["maintainer"], msg))
413 # ...likewise for the Changed-By: field if it exists.
415 (self.pkg.changes["changedby822"],
416 self.pkg.changes["changedby2047"],
417 self.pkg.changes["changedbyname"],
418 self.pkg.changes["changedbyemail"]) = \
419 fix_maintainer (self.pkg.changes.get("changed-by", ""))
420 except ParseMaintError, msg:
421 self.pkg.changes["changedby822"] = ""
422 self.pkg.changes["changedby2047"] = ""
423 self.pkg.changes["changedbyname"] = ""
424 self.pkg.changes["changedbyemail"] = ""
426 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
427 % (filename, self.pkg.changes.get("changed-by", ""), msg))
429 # Ensure all the values in Closes: are numbers
430 if self.pkg.changes.has_key("closes"):
431 for i in self.pkg.changes["closes"].keys():
432 if re_isanum.match(i) is None:
433 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
435 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
436 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
437 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
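# e.g. (hypothetical version) "1:2.6.1-3" gives chopversion "2.6.1-3" and
# chopversion2 "2.6.1"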
439 # Check there isn't already a changes file of the same name in one
440 # of the queue directories.
441 base_filename = os.path.basename(filename)
442 if get_knownchange(base_filename):
443 self.rejects.append("%s: a file with this name already exists." % (base_filename))
445 # Check the .changes is non-empty
446 if not self.pkg.files:
447 self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
450 # Changes was syntactically valid even if we'll reject
453 ###########################################################################
455 def check_distributions(self):
456 "Check and map the Distribution field"
460 # Handle suite mappings
461 for m in Cnf.ValueList("SuiteMappings"):
464 if mtype == "map" or mtype == "silent-map":
465 (source, dest) = args[1:3]
466 if self.pkg.changes["distribution"].has_key(source):
467 del self.pkg.changes["distribution"][source]
468 self.pkg.changes["distribution"][dest] = 1
469 if mtype != "silent-map":
470 self.notes.append("Mapping %s to %s." % (source, dest))
471 if self.pkg.changes.has_key("distribution-version"):
472 if self.pkg.changes["distribution-version"].has_key(source):
473 self.pkg.changes["distribution-version"][source]=dest
474 elif mtype == "map-unreleased":
475 (source, dest) = args[1:3]
476 if self.pkg.changes["distribution"].has_key(source):
477 for arch in self.pkg.changes["architecture"].keys():
478 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
479 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
480 del self.pkg.changes["distribution"][source]
481 self.pkg.changes["distribution"][dest] = 1
483 elif mtype == "ignore":
485 if self.pkg.changes["distribution"].has_key(suite):
486 del self.pkg.changes["distribution"][suite]
487 self.warnings.append("Ignoring %s as a target suite." % (suite))
488 elif mtype == "reject":
490 if self.pkg.changes["distribution"].has_key(suite):
491 self.rejects.append("Uploads to %s are not accepted." % (suite))
492 elif mtype == "propup-version":
493 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
495 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
496 if self.pkg.changes["distribution"].has_key(args[1]):
497 self.pkg.changes.setdefault("distribution-version", {})
498 for suite in args[2:]:
499 self.pkg.changes["distribution-version"][suite] = suite
501 # Ensure there is (still) a target distribution
502 if len(self.pkg.changes["distribution"].keys()) < 1:
503 self.rejects.append("No valid distribution remaining.")
505 # Ensure target distributions exist
506 for suite in self.pkg.changes["distribution"].keys():
507 if not Cnf.has_key("Suite::%s" % (suite)):
508 self.rejects.append("Unknown distribution `%s'." % (suite))
510 ###########################################################################
512 def binary_file_checks(self, f, session):
514 entry = self.pkg.files[f]
516 # Extract package control information
517 deb_file = utils.open_file(f)
519 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
521 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
523 # Can't continue, none of the checks on control would work.
526 # Check for mandatory "Description:"
529 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
531 self.rejects.append("%s: Missing Description in binary package" % (f))
536 # Check for mandatory fields
537 for field in [ "Package", "Architecture", "Version" ]:
538 if control.Find(field) == None:
540 self.rejects.append("%s: No %s field in control." % (f, field))
543 # Ensure the package name matches the one given in the .changes
544 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
545 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
547 # Validate the package field
548 package = control.Find("Package")
549 if not re_valid_pkg_name.match(package):
550 self.rejects.append("%s: invalid package name '%s'." % (f, package))
552 # Validate the version field
553 version = control.Find("Version")
554 if not re_valid_version.match(version):
555 self.rejects.append("%s: invalid version number '%s'." % (f, version))
557 # Ensure the architecture of the .deb is one we know about.
558 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
559 architecture = control.Find("Architecture")
560 upload_suite = self.pkg.changes["distribution"].keys()[0]
562 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
563 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
564 self.rejects.append("Unknown architecture '%s'." % (architecture))
566 # Ensure the architecture of the .deb is one of the ones
567 # listed in the .changes.
568 if not self.pkg.changes["architecture"].has_key(architecture):
569 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
571 # Sanity-check the Depends field
572 depends = control.Find("Depends")
574 self.rejects.append("%s: Depends field is empty." % (f))
576 # Sanity-check the Provides field
577 provides = control.Find("Provides")
579 provide = re_spacestrip.sub('', provides)
581 self.rejects.append("%s: Provides field is empty." % (f))
582 prov_list = provide.split(",")
583 for prov in prov_list:
584 if not re_valid_pkg_name.match(prov):
585 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
587 # Check the section & priority match those given in the .changes (non-fatal)
588 if control.Find("Section") and entry["section"] != "" \
589 and entry["section"] != control.Find("Section"):
590 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
591 (f, control.Find("Section", ""), entry["section"]))
592 if control.Find("Priority") and entry["priority"] != "" \
593 and entry["priority"] != control.Find("Priority"):
594 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
595 (f, control.Find("Priority", ""), entry["priority"]))
597 entry["package"] = package
598 entry["architecture"] = architecture
599 entry["version"] = version
600 entry["maintainer"] = control.Find("Maintainer", "")
602 if f.endswith(".udeb"):
603 self.pkg.files[f]["dbtype"] = "udeb"
604 elif f.endswith(".deb"):
605 self.pkg.files[f]["dbtype"] = "deb"
607 self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
609 entry["source"] = control.Find("Source", entry["package"])
611 # Get the source version
612 source = entry["source"]
615 if source.find("(") != -1:
616 m = re_extract_src_version.match(source)
618 source_version = m.group(2)
620 if not source_version:
621 source_version = self.pkg.files[f]["version"]
623 entry["source package"] = source
624 entry["source version"] = source_version
626 # Ensure the filename matches the contents of the .deb
627 m = re_isadeb.match(f)
630 file_package = m.group(1)
631 if entry["package"] != file_package:
632 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
633 (f, file_package, entry["dbtype"], entry["package"]))
634 epochless_version = re_no_epoch.sub('', control.Find("Version"))
637 file_version = m.group(2)
638 if epochless_version != file_version:
639 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
640 (f, file_version, entry["dbtype"], epochless_version))
643 file_architecture = m.group(3)
644 if entry["architecture"] != file_architecture:
645 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
646 (f, file_architecture, entry["dbtype"], entry["architecture"]))
648 # Check for existing source
649 source_version = entry["source version"]
650 source_package = entry["source package"]
651 if self.pkg.changes["architecture"].has_key("source"):
652 if source_version != self.pkg.changes["version"]:
653 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
654 (source_version, f, self.pkg.changes["version"]))
656 # Check in the SQL database
657 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
658 # Check in one of the other directories
659 source_epochless_version = re_no_epoch.sub('', source_version)
660 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
661 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
663 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
666 dsc_file_exists = False
667 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
668 if cnf.has_key("Dir::Queue::%s" % (myq)):
669 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
670 dsc_file_exists = True
673 if not dsc_file_exists:
674 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
676 # Check the version and for file overwrites
677 self.check_binary_against_db(f, session)
679 # Temporarily disable contents generation until we change the table storage layout
682 #if len(b.rejects) > 0:
683 # for j in b.rejects:
684 # self.rejects.append(j)
686 def source_file_checks(self, f, session):
687 entry = self.pkg.files[f]
689 m = re_issource.match(f)
693 entry["package"] = m.group(1)
694 entry["version"] = m.group(2)
695 entry["type"] = m.group(3)
697 # Ensure the source package name matches the Source field in the .changes
698 if self.pkg.changes["source"] != entry["package"]:
699 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
701 # Ensure the source version matches the version in the .changes file
702 if re_is_orig_source.match(f):
703 changes_version = self.pkg.changes["chopversion2"]
705 changes_version = self.pkg.changes["chopversion"]
707 if changes_version != entry["version"]:
708 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
710 # Ensure the .changes lists source in the Architecture field
711 if not self.pkg.changes["architecture"].has_key("source"):
712 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
714 # Check the signature of a .dsc file
715 if entry["type"] == "dsc":
716 # check_signature returns either:
717 # (None, [list, of, rejects]) or (signature, [])
718 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
720 self.rejects.append(j)
722 entry["architecture"] = "source"
724 def per_suite_file_checks(self, f, suite, session):
726 entry = self.pkg.files[f]
727 archive = utils.where_am_i()
730 if entry.has_key("byhand"):
733 # Check we have fields we need to do these checks
735 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
736 if not entry.has_key(m):
737 self.rejects.append("file '%s' does not have field %s set" % (f, m))
743 # Handle component mappings
744 for m in cnf.ValueList("ComponentMappings"):
745 (source, dest) = m.split()
746 if entry["component"] == source:
747 entry["original component"] = source
748 entry["component"] = dest
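# A ComponentMappings entry is simply "source dest"; e.g. (hypothetical
# configuration) "non-US/main main" would rewrite entry["component"] to "main"
# while remembering the old value in entry["original component"].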
750 # Ensure the component is valid for the target suite
751 if cnf.has_key("Suite::%s::Components" % (suite)) and \
752 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
753 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
756 # Validate the component
757 if not get_component(entry["component"], session):
758 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
761 # See if the package is NEW
762 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
765 # Validate the priority
766 if entry["priority"].find('/') != -1:
767 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
769 # Determine the location
770 location = cnf["Dir::Pool"]
771 l = get_location(location, entry["component"], archive, session)
773 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
774 entry["location id"] = -1
776 entry["location id"] = l.location_id
778 # Check the md5sum & size against existing files (if any)
779 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
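# "pool name" is the per-source pool directory (a sketch of the assumed
# layout): poolify("hello", "main") gives something like "main/h/hello/", so
# the path checked below would be "main/h/hello/<filename>".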
781 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
782 entry["size"], entry["md5sum"], entry["location id"])
785 self.rejects.append("INTERNAL ERROR, check_poolfile() returned multiple matches for %s." % (f))
786 elif found is False and poolfile is not None:
787 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
790 entry["files id"] = None
792 entry["files id"] = poolfile.file_id
794 # Check for packages that have moved from one component to another
795 entry['suite'] = suite
796 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
798 entry["othercomponents"] = res.fetchone()[0]
800 def check_files(self, action=True):
801 archive = utils.where_am_i()
802 file_keys = self.pkg.files.keys()
806 # XXX: As far as I can tell, this can no longer happen - see
807 # comments by AJ in old revisions - mhy
808 # if reprocess is 2 we've already done this and we're checking
809 # things again for the new .orig.tar.gz.
810 # [Yes, I'm fully aware of how disgusting this is]
811 if action and self.reprocess < 2:
813 os.chdir(self.pkg.directory)
815 ret = holding.copy_to_holding(f)
817 # XXX: Should we bail out here or try and continue?
818 self.rejects.append(ret)
822 # Check there isn't already a .changes or .dak file of the same name in
823 # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
824 # [NB: this check must be done post-suite mapping]
825 base_filename = os.path.basename(self.pkg.changes_file)
826 dot_dak_filename = base_filename[:-8] + ".dak"
828 for suite in self.pkg.changes["distribution"].keys():
829 copychanges = "Suite::%s::CopyChanges" % (suite)
830 if cnf.has_key(copychanges) and \
831 os.path.exists(os.path.join(cnf[copychanges], base_filename)):
832 self.rejects.append("%s: a file with this name already exists in %s" \
833 % (base_filename, cnf[copychanges]))
835 copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
836 if cnf.has_key(copy_dot_dak) and \
837 os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
838 self.rejects.append("%s: a file with this name already exists in %s" \
839 % (dot_dak_filename, cnf[copy_dot_dak]))
845 session = DBConn().session()
847 for f, entry in self.pkg.files.items():
848 # Ensure the file does not already exist in one of the accepted directories
849 for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
850 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
851 if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
852 self.rejects.append("%s file already exists in the %s directory." % (f, d))
854 if not re_taint_free.match(f):
855 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
857 # Check the file is readable
858 if os.access(f, os.R_OK) == 0:
859 # When running in -n, copy_to_holding() won't have
860 # generated the reject_message, so we need to.
862 if os.path.exists(f):
863 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
865 self.rejects.append("Can't read `%s'. [file not found]" % (f))
866 entry["type"] = "unreadable"
869 # If it's byhand skip remaining checks
870 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
872 entry["type"] = "byhand"
874 # Checks for a binary package...
875 elif re_isadeb.match(f):
877 entry["type"] = "deb"
879 # This routine appends to self.rejects/warnings as appropriate
880 self.binary_file_checks(f, session)
882 # Checks for a source package...
883 elif re_issource.match(f):
886 # This routine appends to self.rejects/warnings as appropriate
887 self.source_file_checks(f, session)
889 # Not a binary or source package? Assume byhand...
892 entry["type"] = "byhand"
894 # Per-suite file checks
895 entry["oldfiles"] = {}
896 for suite in self.pkg.changes["distribution"].keys():
897 self.per_suite_file_checks(f, suite, session)
901 # If the .changes file says it has source, it must have source.
902 if self.pkg.changes["architecture"].has_key("source"):
904 self.rejects.append("no source found although the Architecture line in the changes file mentions source.")
906 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
907 self.rejects.append("source-only uploads are not supported.")
909 ###########################################################################
910 def check_dsc(self, action=True, session=None):
911 """Returns bool indicating whether or not the source changes are valid"""
912 # Ensure there is source to check
913 if not self.pkg.changes["architecture"].has_key("source"):
918 for f, entry in self.pkg.files.items():
919 if entry["type"] == "dsc":
921 self.rejects.append("cannot process a .changes file with multiple .dsc's.")
926 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
928 self.rejects.append("source uploads must contain a dsc file")
931 # Parse the .dsc file
933 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
934 except CantOpenError:
935 # if not -n copy_to_holding() will have done this for us...
937 self.rejects.append("%s: can't read file." % (dsc_filename))
938 except ParseChangesError, line:
939 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
940 except InvalidDscError, line:
941 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
942 except ChangesUnicodeError:
943 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
945 # Build up the file list of files mentioned by the .dsc
947 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
948 except NoFilesFieldError:
949 self.rejects.append("%s: no Files: field." % (dsc_filename))
951 except UnknownFormatError, format:
952 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
954 except ParseChangesError, line:
955 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
958 # Enforce mandatory fields
959 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
960 if not self.pkg.dsc.has_key(i):
961 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
964 # Validate the source and version fields
965 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
966 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
967 if not re_valid_version.match(self.pkg.dsc["version"]):
968 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
970 # Only a limited list of source formats are allowed in each suite
971 for dist in self.pkg.changes["distribution"].keys():
972 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
973 if self.pkg.dsc["format"] not in allowed:
974 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
976 # Validate the Maintainer field
978 # We ignore the return value
979 fix_maintainer(self.pkg.dsc["maintainer"])
980 except ParseMaintError, msg:
981 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
982 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
984 # Validate the build-depends field(s)
985 for field_name in [ "build-depends", "build-depends-indep" ]:
986 field = self.pkg.dsc.get(field_name)
988 # Have apt try to parse them...
990 apt_pkg.ParseSrcDepends(field)
992 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
994 # Ensure the version number in the .dsc matches the version number in the .changes
995 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
996 changes_version = self.pkg.files[dsc_filename]["version"]
998 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
999 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1001 # Ensure the Files field contain only what's expected
1002 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1004 # Ensure source is newer than existing source in target suites
1005 session = DBConn().session()
1006 self.check_source_against_db(dsc_filename, session)
1007 self.check_dsc_against_db(dsc_filename, session)
1012 ###########################################################################
1014 def get_changelog_versions(self, source_dir):
1015 """Extracts a the source package and (optionally) grabs the
1016 version history out of debian/changelog for the BTS."""
1020 # Find the .dsc (again)
1022 for f in self.pkg.files.keys():
1023 if self.pkg.files[f]["type"] == "dsc":
1026 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1027 if not dsc_filename:
1030 # Create a symlink mirror of the source files in our temporary directory
1031 for f in self.pkg.files.keys():
1032 m = re_issource.match(f)
1034 src = os.path.join(source_dir, f)
1035 # If a file is missing for whatever reason, give up.
1036 if not os.path.exists(src):
1039 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1040 self.pkg.orig_files[f].has_key("path"):
1042 dest = os.path.join(os.getcwd(), f)
1043 os.symlink(src, dest)
1045 # If the orig files are not a part of the upload, create symlinks to the
1047 for orig_file in self.pkg.orig_files.keys():
1048 if not self.pkg.orig_files[orig_file].has_key("path"):
1050 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1051 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1053 # Extract the source
1054 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1055 (result, output) = commands.getstatusoutput(cmd)
1057 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1058 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1061 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1064 # Get the upstream version
1065 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1066 if re_strip_revision.search(upstr_version):
1067 upstr_version = re_strip_revision.sub('', upstr_version)
1069 # Ensure the changelog file exists
1070 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1071 if not os.path.exists(changelog_filename):
1072 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1075 # Parse the changelog
1076 self.pkg.dsc["bts changelog"] = ""
1077 changelog_file = utils.open_file(changelog_filename)
1078 for line in changelog_file.readlines():
1079 m = re_changelog_versions.match(line)
1081 self.pkg.dsc["bts changelog"] += line
1082 changelog_file.close()
1084 # Check we found at least one revision in the changelog
1085 if not self.pkg.dsc["bts changelog"]:
1086 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1088 def check_source(self):
1089 # XXX: I'm fairly sure reprocess == 2 can never happen
1090 # AJT disabled the is_incoming check years ago - mhy
1091 # We should probably scrap or rethink the whole reprocess thing
1093 # a) there's no source
1094 # or b) reprocess is 2 - we will do this check next time when orig
1095 # tarball is in 'files'
1096 # or c) the orig files are MIA
1097 if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
1098 or len(self.pkg.orig_files) == 0:
1101 tmpdir = utils.temp_dirname()
1103 # Move into the temporary directory
1107 # Get the changelog version history
1108 self.get_changelog_versions(cwd)
1110 # Move back and cleanup the temporary tree
1114 shutil.rmtree(tmpdir)
1116 if e.errno != errno.EACCES:
1118 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1120 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1121 # We probably have u-r or u-w directories so chmod everything
1123 cmd = "chmod -R u+rwx %s" % (tmpdir)
1124 result = os.system(cmd)
1126 utils.fubar("'%s' failed with result %s." % (cmd, result))
1127 shutil.rmtree(tmpdir)
1128 except Exception, e:
1129 print "foobar2 (%s)" % e
1130 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1132 ###########################################################################
1133 def ensure_hashes(self):
1134 # Make sure we recognise the format of the Files: field in the .changes
1135 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1136 if len(format) == 2:
1137 format = int(format[0]), int(format[1])
1139 format = int(float(format[0])), 0
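# e.g. a Format field of "1.8" becomes the tuple (1, 8); a bare "1" without a
# dot falls back to (1, 0)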
1141 # We need to deal with the original changes blob, as the fields we need
1142 # might not be in the changes dict serialised into the .dak anymore.
1143 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1145 # Copy the checksums over to the current changes dict. This will keep
1146 # the existing modifications to it intact.
1147 for field in orig_changes:
1148 if field.startswith('checksums-'):
1149 self.pkg.changes[field] = orig_changes[field]
1151 # Check for unsupported hashes
1152 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1153 self.rejects.append(j)
1155 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1156 self.rejects.append(j)
1158 # We have to calculate the hash ourselves if the changes format predates the
1159 # version the hash appears in, rather than require it to exist in the changes file
1160 for hashname, hashfunc, version in utils.known_hashes:
1161 # TODO: Move _ensure_changes_hash into this class
1162 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1163 self.rejects.append(j)
1164 if "source" in self.pkg.changes["architecture"]:
1165 # TODO: Move _ensure_dsc_hash into this class
1166 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1167 self.rejects.append(j)
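# Sketch of what the loop above consumes (assumed shape of utils.known_hashes):
# a list of (hashname, hashfunc, introduced_in_format) tuples, e.g.
#   [("sha1", apt_pkg.sha1sum, (1, 8)), ("sha256", apt_pkg.sha256sum, (1, 8))]
# so the newer hashes are only required from changes Format 1.8 onwards and are
# computed locally for older formats.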
1169 def check_hashes(self):
1170 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1171 self.rejects.append(m)
1173 for m in utils.check_size(".changes", self.pkg.files):
1174 self.rejects.append(m)
1176 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1177 self.rejects.append(m)
1179 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1180 self.rejects.append(m)
1182 self.ensure_hashes()
1184 ###########################################################################
1186 def ensure_orig(self, target_dir='.', session=None):
1188 Ensures that all orig files mentioned in the changes file are present
1189 in target_dir. If they do not exist, they are symlinked into place.
1191 A list containing the symlinks that were created is returned (so they
1198 for filename, entry in self.pkg.dsc_files.iteritems():
1199 if not re_is_orig_source.match(filename):
1200 # File is not an orig; ignore
1203 if os.path.exists(filename):
1204 # File exists, no need to continue
1207 def symlink_if_valid(path):
1208 f = utils.open_file(path)
1209 md5sum = apt_pkg.md5sum(f)
1212 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1213 expected = (int(entry['size']), entry['md5sum'])
1215 if fingerprint != expected:
1218 dest = os.path.join(target_dir, filename)
1220 os.symlink(path, dest)
1221 symlinked.append(dest)
1227 session_ = DBConn().session()
1232 for poolfile in get_poolfile_like_name('/%s' % filename, session_):
1233 poolfile_path = os.path.join(
1234 poolfile.location.path, poolfile.filename
1237 if symlink_if_valid(poolfile_path):
1247 # Look in some other queues for the file
1248 queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
1249 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1251 for queue in queues:
1252 if not cnf.get('Dir::Queue::%s' % queue):
1255 queuefile_path = os.path.join(
1256 cnf['Dir::Queue::%s' % queue], filename
1259 if not os.path.exists(queuefile_path):
1260 # Does not exist in this queue
1263 if symlink_if_valid(queuefile_path):
1268 ###########################################################################
1270 def check_lintian(self):
1273 # Don't reject binary uploads
1274 if not self.pkg.changes['architecture'].has_key('source'):
1277 # Only check some distributions
1279 for dist in ('unstable', 'experimental'):
1280 if dist in self.pkg.changes['distribution']:
1287 tagfile = cnf.get("Dinstall::LintianTags")
1289 # We don't have a tagfile, so just don't do anything.
1292 # Parse the yaml file
1293 sourcefile = file(tagfile, 'r')
1294 sourcecontent = sourcefile.read()
1297 lintiantags = yaml.load(sourcecontent)['lintian']
1298 except yaml.YAMLError, msg:
1299 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1302 # Try and find all orig mentioned in the .dsc
1303 symlinked = self.ensure_orig()
1305 # Now set up the input file for lintian. lintian wants "one tag per line" only,
1306 # so build it that way. We put all types of tags in one file and then sort
1307 # through lintian's output later to see whether a tag we detected is fatal or not.
1308 # So we only run lintian once on all tags, even if we might reject on some, but not
1310 # Additionally build up a set of tags
1312 (fd, temp_filename) = utils.temp_filename()
1313 temptagfile = os.fdopen(fd, 'w')
1314 for tagtype in lintiantags:
1315 for tag in lintiantags[tagtype]:
1316 temptagfile.write("%s\n" % tag)
1320 # So now we should look at running lintian on the .changes file, capturing its output
1322 command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
1323 (result, output) = commands.getstatusoutput(command)
1325 # We are done with lintian, remove our tempfile and any symlinks we created
1326 os.unlink(temp_filename)
1327 for symlink in symlinked:
1331 utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
1332 utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
1334 if len(output) == 0:
1339 self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
1341 # We have lintian output, so this package isn't clean. Let's parse it and see if we
1342 # have a candidate for a reject, e.g.
1343 # W: tzdata: binary-without-manpage usr/sbin/tzconfig
1344 for line in output.split('\n'):
1345 m = re_parse_lintian.match(line)
1350 epackage = m.group(2)
1354 # So let's check if we know the tag at all.
1355 if etag not in tags:
1359 # We know it and it is overridden. Check that the override is allowed.
1360 if etag in lintiantags['warning']:
1361 # The tag is overridden, and it is allowed to be overridden.
1362 # Don't add a reject message.
1364 elif etag in lintiantags['error']:
1365 # The tag is overridden - but is not allowed to be
1366 self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
1367 log("ftpmaster does not allow tag to be overridable", etag)
1369 # Tag is known, it is not overridden, direct reject.
1370 self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
1371 # Now tell if they *might* override it.
1372 if etag in lintiantags['warning']:
1373 log("auto rejecting", "overridable", etag)
1374 self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
1376 log("auto rejecting", "not overridable", etag)
1378 ###########################################################################
1379 def check_urgency(self):
1381 if self.pkg.changes["architecture"].has_key("source"):
1382 if not self.pkg.changes.has_key("urgency"):
1383 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1384 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1385 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1386 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1387 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1388 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1390 ###########################################################################
1392 # Sanity check the time stamps of files inside debs.
1393 # [Files in the near future cause ugly warnings and extreme time
1394 # travel can cause errors on extraction]
1396 def check_timestamps(self):
1399 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1400 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
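# e.g. (hypothetical configuration) FutureTimeTravelGrace "86400" tolerates
# timestamps up to a day ahead of now, while PastCutoffYear "1984" flags
# anything older than 1984-01-01 as ancient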
1401 tar = TarTime(future_cutoff, past_cutoff)
1403 for filename, entry in self.pkg.files.items():
1404 if entry["type"] == "deb":
1407 deb_file = utils.open_file(filename)
1408 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1411 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1412 except SystemError, e:
1413 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1414 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1417 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1421 future_files = tar.future_files.keys()
1423 num_future_files = len(future_files)
1424 future_file = future_files[0]
1425 future_date = tar.future_files[future_file]
1426 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1427 % (filename, num_future_files, future_file, time.ctime(future_date)))
1429 ancient_files = tar.ancient_files.keys()
1431 num_ancient_files = len(ancient_files)
1432 ancient_file = ancient_files[0]
1433 ancient_date = tar.ancient_files[ancient_file]
1434 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1435 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1437 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1439 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1440 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1442 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1448 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1449 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1450 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1451 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1452 self.pkg.changes["sponsoremail"] = uid_email
1457 ###########################################################################
1458 # check_signed_by_key checks
1459 ###########################################################################
1461 def check_signed_by_key(self):
1462 """Ensure the .changes is signed by an authorized uploader."""
1463 session = DBConn().session()
1465 # First of all we check that the person has proper upload permissions
1466 # and that this upload isn't blocked
1467 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1470 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1473 # TODO: Check that import-keyring adds UIDs properly
1475 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1478 # Check that the fingerprint which uploaded has permission to do so
1479 self.check_upload_permissions(fpr, session)
1481 # Check that this package is not in a transition
1482 self.check_transition(session)
1487 def check_upload_permissions(self, fpr, session):
1488 # Check any one-off upload blocks
1489 self.check_upload_blocks(fpr, session)
1491 # Start with DM as a special case
1492 # DM is a special case unfortunately, so we check it first
1493 # (keys with no source access get more access than DMs in one
1494 # way; DMs can only upload for their packages whether source
1495 # or binary, whereas keys with no access might be able to
1496 # upload some binaries)
1497 if fpr.source_acl.access_level == 'dm':
1498 self.check_dm_source_upload(fpr, session)
1500 # Check source-based permissions for other types
1501 if self.pkg.changes["architecture"].has_key("source"):
1502 if fpr.source_acl.access_level is None:
1503 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1504 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1505 self.rejects.append(rej)
1508 # If not a DM, we allow full upload rights
1509 uid_email = "%s@debian.org" % (fpr.uid.uid)
1510 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1513 # Check binary upload permissions
1514 # By this point we know that DMs can't have got here unless they
1515 # are allowed to deal with the package concerned so just apply
1517 if fpr.binary_acl.access_level == 'full':
1520 # Otherwise we're in the map case
1521 tmparches = self.pkg.changes["architecture"].copy()
1522 tmparches.pop('source', None)
1524 for bam in fpr.binary_acl_map:
1525 tmparches.pop(bam.architecture.arch_string, None)
1527 if len(tmparches.keys()) > 0:
1528 if fpr.binary_reject:
1529 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1530 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1531 self.rejects.append(rej)
1533 # TODO: This is where we'll implement reject vs throw away binaries later
1534 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1535 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1536 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1537 self.rejects.append(rej)
1540 def check_upload_blocks(self, fpr, session):
1541 """Check whether any upload blocks apply to this source, source
1542 version, uid / fpr combination"""
1544 def block_rej_template(fb):
1545 rej = 'Manual upload block in place for package %s' % fb.source
1546 if fb.version is not None:
1547 rej += ', version %s' % fb.version
1550 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1551 # version is None if the block applies to all versions
1552 if fb.version is None or fb.version == self.pkg.changes['version']:
1553 # Check both fpr and uid - either is enough to cause a reject
1554 if fb.fpr is not None:
1555 if fb.fpr.fingerprint == fpr.fingerprint:
1556 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1557 if fb.uid is not None:
1558 if fb.uid == fpr.uid:
1559 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1562 def check_dm_upload(self, fpr, session):
1563 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1564 ## none of the uploaded packages are NEW
1566 for f in self.pkg.files.keys():
1567 if self.pkg.files[f].has_key("byhand"):
1568 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1570 if self.pkg.files[f].has_key("new"):
1571 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1577 ## the most recent version of the package uploaded to unstable or
1578 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1579 ## section of its control file
1580 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1581 q = q.join(SrcAssociation)
1582 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1583 q = q.order_by(desc('source.version')).limit(1)
1588 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1589 self.rejects.append(rej)
1593 if not r.dm_upload_allowed:
1594 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1595 self.rejects.append(rej)
1598 ## the Maintainer: field of the uploaded .changes file corresponds with
1599 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1601 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1602 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1604 ## the most recent version of the package uploaded to unstable or
1605 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1606 ## non-developer maintainers cannot NMU or hijack packages)
1608 # srcuploaders includes the maintainer
1610 for sup in r.srcuploaders:
1611 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1612 # Eww - I hope we never have two people with the same name in Debian
1613 if email == fpr.uid.uid or name == fpr.uid.name:
1618 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1621 ## none of the packages are being taken over from other source packages
1622 for b in self.pkg.changes["binary"].keys():
1623 for suite in self.pkg.changes["distribution"].keys():
1624 q = session.query(DBSource)
1625 q = q.join(DBBinary).filter_by(package=b)
1626 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1629 if s.source != self.pkg.changes["source"]:
1630 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1634 def check_transition(self, session):
1637 sourcepkg = self.pkg.changes["source"]
1639 # No sourceful upload -> no need to do anything else, direct return
1640 # We also work with unstable uploads, not experimental or those going to some
1641 # proposed-updates queue
1642 if "source" not in self.pkg.changes["architecture"] or \
1643 "unstable" not in self.pkg.changes["distribution"]:
1646 # Also only check if there is a file defined (and existent) with
1648 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1649 if transpath == "" or not os.path.exists(transpath):
1652 # Parse the yaml file
1653 sourcefile = file(transpath, 'r')
1654 sourcecontent = sourcefile.read()
1656 transitions = yaml.load(sourcecontent)
1657 except yaml.YAMLError, msg:
1658 # This shouldn't happen, there is a wrapper to edit the file which
1659 # checks it, but we prefer to be safe rather than end up rejecting
1661 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1664 # Now look through all defined transitions
1665 for trans in transitions:
1666 t = transitions[trans]
1667 source = t["source"]
1670 # Will be None if nothing is in testing.
1671 current = get_source_in_suite(source, "testing", session)
1672 if current is not None:
1673 compare = apt_pkg.VersionCompare(current.version, expected)
1675 if current is None or compare < 0:
1676 # This is still valid, the current version in testing is older than
1677 # the new version we wait for, or there is none in testing yet
1679 # Check if the source we look at is affected by this.
1680 if sourcepkg in t['packages']:
1681 # The source is affected, let's reject it.
1683 rejectmsg = "%s: part of the %s transition.\n\n" % (
1686 if current is not None:
1687 currentlymsg = "at version %s" % (current.version)
1689 currentlymsg = "not present in testing"
1691 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1693 rejectmsg += "\n".join(textwrap.wrap("""Your package
1694 is part of a testing transition designed to get %s migrated (it is
1695 currently %s, we need version %s). This transition is managed by the
1696 Release Team, and %s is the Release-Team member responsible for it.
1697 Please mail debian-release@lists.debian.org or contact %s directly if you
1698 need further assistance. You might want to upload to experimental until this
1699 transition is done."""
1700 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1702 self.rejects.append(rejectmsg)
1705 ###########################################################################
1706 # End check_signed_by_key checks
1707 ###########################################################################
1709 def build_summaries(self):
1710 """ Build a summary of changes the upload introduces. """
1712 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1714 short_summary = summary
1716 # This is for direport's benefit...
1717 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1720 summary += "Changes: " + f
1722 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1724 summary += self.announce(short_summary, 0)
1726 return (summary, short_summary)
1728 ###########################################################################
1730 def close_bugs(self, summary, action):
1732 Send mail to close bugs as instructed by the closes field in the changes file.
1733 Also add a line to summary if any work was done.
1735 @type summary: string
1736 @param summary: summary text, as given by L{build_summaries}
1739 @param action: If set to false, no real action will be taken.
1742 @return: summary. If action was taken, it is extended by the list of closed bugs.
1746 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1748 bugs = self.pkg.changes["closes"].keys()
1754 summary += "Closing bugs: "
1756 summary += "%s " % (bug)
1759 self.Subst["__BUG_NUMBER__"] = bug
1760 if self.pkg.changes["distribution"].has_key("stable"):
1761 self.Subst["__STABLE_WARNING__"] = """
1762 Note that this package is not part of the released stable Debian
1763 distribution. It may have dependencies on other unreleased software,
1764 or other instabilities. Please take care if you wish to install it.
1765 The update will eventually make its way into the next released Debian
1766 distribution."""
1767 else:
1768 self.Subst["__STABLE_WARNING__"] = ""
1769 mail_message = utils.TemplateSubst(self.Subst, template)
1770 utils.send_mail(mail_message)
1772 # Clear up after ourselves
1773 del self.Subst["__BUG_NUMBER__"]
1774 del self.Subst["__STABLE_WARNING__"]
1776 if action and self.logger:
1777 self.logger.log(["closing bugs"] + bugs)
1783 ###########################################################################
1785 def announce(self, short_summary, action):
1787 Send an announce mail about a new upload.
1789 @type short_summary: string
1790 @param short_summary: Short summary text to include in the mail
1793 @param action: If set to false, no real action will be taken.
1796 @return: Text string describing the action taken.
1801 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1803 # Only do announcements for source uploads with a recent dpkg-dev installed
1804 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1805 self.pkg.changes["architecture"].has_key("source"):
1811 self.Subst["__SHORT_SUMMARY__"] = short_summary
1813 for dist in self.pkg.changes["distribution"].keys():
1814 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1815 if announce_list == "" or lists_done.has_key(announce_list):
1818 lists_done[announce_list] = 1
1819 summary += "Announcing to %s\n" % (announce_list)
1823 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1824 if cnf.get("Dinstall::TrackingServer") and \
1825 self.pkg.changes["architecture"].has_key("source"):
1826 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1827 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1829 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1830 utils.send_mail(mail_message)
1832 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1834 if cnf.FindB("Dinstall::CloseBugs"):
1835 summary = self.close_bugs(summary, action)
1837 del self.Subst["__SHORT_SUMMARY__"]
1841 ###########################################################################
1843 def accept (self, summary, short_summary, targetdir=None):
1847 This moves all files referenced from the .changes into the I{accepted}
1848 queue, sends the accepted mail, announces to lists, closes bugs and
1849 also checks for override disparities. If enabled it will write out
1850 the version history for the BTS Version Tracking and will finally call
1853 @type summary: string
1854 @param summary: Summary text
1856 @type short_summary: string
1857 @param short_summary: Short summary
1862 stats = SummaryStats()
1864 accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
1866 if targetdir is None:
1867 targetdir = cnf["Dir::Queue::Accepted"]
1871 self.logger.log(["Accepting changes", self.pkg.changes_file])
1873 self.pkg.write_dot_dak(targetdir)
1875 # Move all the files into the accepted directory
1876 utils.move(self.pkg.changes_file, targetdir)
1878 for name, entry in sorted(self.pkg.files.items()):
1879 utils.move(name, targetdir)
1880 stats.accept_bytes += float(entry["size"])
1882 stats.accept_count += 1
1884 # Send accept mail, announce to lists, close bugs and check for
1885 # override disparities
1886 if not cnf["Dinstall::Options::No-Mail"]:
1888 self.Subst["__SUITE__"] = ""
1889 self.Subst["__SUMMARY__"] = summary
1890 mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
1891 utils.send_mail(mail_message)
1892 self.announce(short_summary, 1)
1894 ## Helper stuff for DebBugs Version Tracking
1895 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1896 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1897 # the conditionalization on dsc["bts changelog"] should be
1900 # Write out the version history from the changelog
1901 if self.pkg.changes["architecture"].has_key("source") and \
1902 self.pkg.dsc.has_key("bts changelog"):
1904 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1905 version_history = os.fdopen(fd, 'w')
1906 version_history.write(self.pkg.dsc["bts changelog"])
1907 version_history.close()
1908 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1909 self.pkg.changes_file[:-8]+".versions")
1910 os.rename(temp_filename, filename)
1911 os.chmod(filename, 0644)
1913 # Write out the binary -> source mapping.
1914 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1915 debinfo = os.fdopen(fd, 'w')
1916 for name, entry in sorted(self.pkg.files.items()):
1917 if entry["type"] == "deb":
1918 line = " ".join([entry["package"], entry["version"],
1919 entry["architecture"], entry["source package"],
1920 entry["source version"]])
1921 debinfo.write(line+"\n")
1923 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1924 self.pkg.changes_file[:-8]+".debinfo")
1925 os.rename(temp_filename, filename)
1926 os.chmod(filename, 0644)
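# At this point two helper files sit in the BTSVersionTrack directory:
# <changes>.versions (the version history taken from the changelog) and
# <changes>.debinfo (one "package version arch source-package source-version"
# line per .deb), both left world-readable for the BTS version tracking tooling.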
1928 # It is Cnf["Dir::Queue::Accepted"] here, not targetdir!
1929 # <Ganneff> we do call queue_build too
1930 # <mhy> well yes, we'd have had to if we were inserting into accepted
1931 # <Ganneff> now. thats database only.
1932 # <mhy> urgh, that's going to get messy
1933 # <Ganneff> so i make the p-n call to it *also* using accepted/
1934 # <mhy> but then the packages will be in the queue_build table without the files being there
1935 # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
1936 # <mhy> ah, good point
1937 # <Ganneff> so it will work out, as unchecked move it over
1938 # <mhy> that's all completely sick
1941 # This routine returns None on success or an error on failure
1942 res = get_or_set_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
1947 def check_override(self):
1949 Checks override entries for validity. Mails "Override disparity" warnings
1950 if that feature is enabled.
1952 Abandons the check if
1953 - override disparity checks are disabled
1954 - mail sending is disabled
1959 # Abandon the check if:
1960 # a) override disparity checks have been disabled
1961 # b) we're not sending mail
1962 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1963 cnf["Dinstall::Options::No-Mail"]:
1966 summary = self.pkg.check_override()
1971 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1974 self.Subst["__SUMMARY__"] = summary
1975 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1976 utils.send_mail(mail_message)
1977 del self.Subst["__SUMMARY__"]
1979 ###########################################################################
1981 def remove(self, dir=None):
1983 Used (for instance) in p-u to remove the package from unchecked
1986 os.chdir(self.pkg.directory)
1990 for f in self.pkg.files.keys():
1992 os.unlink(self.pkg.changes_file)
1994 ###########################################################################
1996 def move_to_dir (self, dest, perms=0660, changesperms=0664):
1998 Move files to dest with certain perms/changesperms
2000 utils.move(self.pkg.changes_file, dest, perms=changesperms)
2001 for f in self.pkg.files.keys():
2002 utils.move(f, dest, perms=perms)
2004 ###########################################################################
2006 def force_reject(self, reject_files):
2008 Forcefully move files from the current directory to the
2009 reject directory. If any file already exists in the reject
2010 directory it will be moved to the morgue to make way for the new file.
2014 @param reject_files: file dictionary
2020 for file_entry in reject_files:
2021 # Skip any files which don't exist or which we don't have permission to copy.
2022 if os.access(file_entry, os.R_OK) == 0:
2025 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2028 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
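# O_CREAT|O_EXCL makes the open fail with EEXIST if anything already sits at
# dest_file, so we never silently clobber (or follow) a pre-existing file; the
# handler below then tries to move the old file to the morgue first.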
2030 # File exists? Let's try and move it to the morgue
2031 if e.errno == errno.EEXIST:
2032 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2034 morgue_file = utils.find_next_free(morgue_file)
2035 except NoFreeFilenameError:
2036 # Something's either gone badly Pete Tong, or
2037 # someone is trying to exploit us.
2038 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2040 utils.move(dest_file, morgue_file, perms=0660)
2042 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2045 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2049 # If we got here, we own the destination file, so we can
2050 # safely overwrite it.
2051 utils.move(file_entry, dest_file, 1, perms=0660)
2054 ###########################################################################
2055 def do_reject (self, manual=0, reject_message="", note=""):
2057 Reject an upload. If called without a reject message or C{manual} is
2058 true, spawn an editor so the user can write one.
2061 @param manual: manual or automated rejection
2063 @type reject_message: string
2064 @param reject_message: A reject message
2069 # If we weren't given a manual rejection message, spawn an
2070 # editor so the user can add one in...
2071 if manual and not reject_message:
2072 (fd, temp_filename) = utils.temp_filename()
2073 temp_file = os.fdopen(fd, 'w')
2076 temp_file.write(line)
2078 editor = os.environ.get("EDITOR","vi")
2080 while answer == 'E':
2081 os.system("%s %s" % (editor, temp_filename))
2082 temp_fh = utils.open_file(temp_filename)
2083 reject_message = "".join(temp_fh.readlines())
2085 print "Reject message:"
2086 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2087 prompt = "[R]eject, Edit, Abandon, Quit ?"
2089 while prompt.find(answer) == -1:
2090 answer = utils.our_raw_input(prompt)
2091 m = re_default_answer.search(prompt)
2094 answer = answer[:1].upper()
2095 os.unlink(temp_filename)
2101 print "Rejecting.\n"
2105 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2106 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2108 # Move all the files into the reject directory
2109 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2110 self.force_reject(reject_files)
2112 # If we fail here someone is probably trying to exploit the race
2113 # so let's just raise an exception ...
2114 if os.path.exists(reason_filename):
2115 os.unlink(reason_filename)
2116 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2118 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2122 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2123 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2124 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2125 os.write(reason_fd, reject_message)
2126 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2128 # Build up the rejection email
2129 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2130 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2131 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2132 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2133 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2134 # Write the rejection email out as the <foo>.reason file
2135 os.write(reason_fd, reject_mail_message)
2137 del self.Subst["__REJECTOR_ADDRESS__"]
2138 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2139 del self.Subst["__CC__"]
2143 # Send the rejection mail if appropriate
2144 if not cnf["Dinstall::Options::No-Mail"]:
2145 utils.send_mail(reject_mail_message)
2148 self.logger.log(["rejected", self.pkg.changes_file])
2152 ################################################################################
2153 def in_override_p(self, package, component, suite, binary_type, file, session):
2155 Check if a package already has override entries in the DB
2157 @type package: string
2158 @param package: package name
2160 @type component: string
2161 @param component: component name
2164 @param suite: suite name
2166 @type binary_type: string
2167 @param binary_type: type of the package
2170 @param file: filename we check
2172 @return: the database result. But no one cares anyway.
2178 if binary_type == "": # must be source
2179 file_type = "dsc"
2180 else:
2181 file_type = binary_type
2183 # Override suite name; used for example with proposed-updates
2184 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2185 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
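# e.g. a proposed-updates style suite is typically configured with OverrideSuite
# pointing at its base suite, so its uploads are checked against that suite's
# override table (illustrative; depends on the local configuration).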
2187 result = get_override(package, suite, component, file_type, session)
2189 # If checking for a source package fall back on the binary override type
2190 if file_type == "dsc" and len(result) < 1:
2191 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2193 # Remember the section and priority so we can check them later if appropriate
2196 self.pkg.files[file]["override section"] = result.section.section
2197 self.pkg.files[file]["override priority"] = result.priority.priority
2202 ################################################################################
2203 def get_anyversion(self, sv_list, suite):
2206 @param sv_list: list of (suite, version) tuples to check
2209 @param suite: suite name
2215 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
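# The Enhances list names suites whose versions are counted together with this
# suite for the "any version" lookup below; presumably this is what the
# propagation logic in cross_suite_version_check relies on.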
2216 for (s, v) in sv_list:
2217 if s in [ x.lower() for x in anysuite ]:
2218 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2223 ################################################################################
2225 def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
2228 @param sv_list: list of (suite, version) tuples to check
2233 @type new_version: string
2234 @param new_version: version of the package being uploaded
2236 Ensure versions are newer than existing packages in target
2237 suites and that cross-suite version checking rules as
2238 set out in the conf file are satisfied.
2243 # Check versions for each target suite
2244 for target_suite in self.pkg.changes["distribution"].keys():
2245 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2246 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2248 # Enforce "must be newer than target suite" even if conffile omits it
2249 if target_suite not in must_be_newer_than:
2250 must_be_newer_than.append(target_suite)
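# Illustrative example: for a sourceful upload of foo 1.2-2 aimed at unstable,
# unstable itself is now in must_be_newer_than, so if unstable already carries
# foo 1.2-2 (or newer) the loop below rejects the upload.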
2252 for (suite, existent_version) in sv_list:
2253 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2255 if suite in must_be_newer_than and sourceful and vercmp < 1:
2256 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
2258 if suite in must_be_older_than and vercmp > -1:
2261 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2262 # we really use the other suite, ignoring the conflicting one ...
2263 addsuite = self.pkg.changes["distribution-version"][suite]
2265 add_version = self.get_anyversion(sv_list, addsuite)
2266 target_version = self.get_anyversion(sv_list, target_suite)
2269 # not add_version can only happen if we map to a suite
2270 # that doesn't enhance the suite we're propup'ing from.
2271 # so "propup-ver x a b c; map a d" is a problem only if
2272 # d doesn't enhance a.
2274 # i think we could always propagate in this case, rather
2275 # than complaining. either way, this isn't a REJECT issue
2277 # And - we really should complain to the dorks who configured dak
2278 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2279 self.pkg.changes.setdefault("propdistribution", {})
2280 self.pkg.changes["propdistribution"][addsuite] = 1
2282 elif not target_version:
2283 # not target_version is true when the package is NEW
2284 # we could just stick with the "...old version..." REJECT
2285 # for this, I think.
2286 self.rejects.append("Won't propogate NEW packages.")
2287 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2288 # propagation would be redundant; no need to reject though.
2289 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
2291 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2292 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2294 self.warnings.append("Propogating upload to %s" % (addsuite))
2295 self.pkg.changes.setdefault("propdistribution", {})
2296 self.pkg.changes["propdistribution"][addsuite] = 1
2300 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
2302 ################################################################################
2303 def check_binary_against_db(self, file, session):
2304 # Ensure version is sane
2305 q = session.query(BinAssociation)
2306 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
2307 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
2309 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2310 file, self.pkg.files[file]["version"], sourceful=False)
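# The tuples above list every suite that already carries this binary package
# (for this architecture or 'all') together with the version found there;
# cross_suite_version_check then applies the MustBeNewerThan/MustBeOlderThan rules.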
2312 # Check for any existing copies of the file
2313 q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
2314 q = q.filter_by(version=self.pkg.files[file]["version"])
2315 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])
2318 self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
2320 ################################################################################
2322 def check_source_against_db(self, file, session):
2325 source = self.pkg.dsc.get("source")
2326 version = self.pkg.dsc.get("version")
2328 # Ensure version is sane
2329 q = session.query(SrcAssociation)
2330 q = q.join(DBSource).filter(DBSource.source==source)
2332 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2333 file, version, sourceful=True)
2335 ################################################################################
2336 def check_dsc_against_db(self, file, session):
2339 @warning: NB: this function can remove entries from the 'files' index [if
2340 the orig tarball is a duplicate of the one in the archive]; if
2341 you're iterating over 'files' and call this function as part of
2342 the loop, be sure to add a check to the top of the loop to
2343 ensure you haven't just tried to dereference the deleted entry.
2348 self.pkg.orig_files = {} # XXX: do we need to clear it?
2349 orig_files = self.pkg.orig_files
2351 # Try and find all files mentioned in the .dsc. This has
2352 # to work harder to cope with the multiple possible
2353 # locations of an .orig.tar.gz.
2354 # The ordering on the select is needed to pick the newest orig
2355 # when it exists in multiple places.
2356 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2358 if self.pkg.files.has_key(dsc_name):
2359 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2360 actual_size = int(self.pkg.files[dsc_name]["size"])
2361 found = "%s in incoming" % (dsc_name)
2363 # Check the file does not already exist in the archive
2364 ql = get_poolfile_like_name(dsc_name, session)
2366 # Strip out anything that isn't '%s' or '/%s$'
2368 if not i.filename.endswith(dsc_name):
2371 # "[dak] has not broken them. [dak] has fixed a
2372 # brokenness. Your crappy hack exploited a bug in
2375 # "(Come on! I thought it was always obvious that
2376 # one just doesn't release different files with
2377 # the same name and version.)"
2378 # -- ajk@ on d-devel@l.d.o
2381 # Ignore exact matches for .orig.tar.gz
2383 if re_is_orig_source.match(dsc_name):
2385 if self.pkg.files.has_key(dsc_name) and \
2386 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2387 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2388 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2389 # TODO: Don't delete the entry, just mark it as not needed
2390 # This would fix the stupidity of changing something we often iterate over
2391 # whilst we're doing it
2392 del self.pkg.files[dsc_name]
2393 if not orig_files.has_key(dsc_name):
2394 orig_files[dsc_name] = {}
2395 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2399 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2401 elif re_is_orig_source.match(dsc_name):
2403 ql = get_poolfile_like_name(dsc_name, session)
2405 # Strip out anything that isn't '%s' or '/%s$'
2406 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2408 if not i.filename.endswith(dsc_name):
2412 # Unfortunately, we may get more than one match here if,
2413 # for example, the package was in potato but had an -sa
2414 # upload in woody. So we need to choose the right one.
2416 # default to something sane in case we don't match any or have only one
2421 old_file = os.path.join(i.location.path, i.filename)
2422 old_file_fh = utils.open_file(old_file)
2423 actual_md5 = apt_pkg.md5sum(old_file_fh)
2425 actual_size = os.stat(old_file)[stat.ST_SIZE]
2426 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2429 old_file = os.path.join(i.location.path, i.filename)
2430 old_file_fh = utils.open_file(old_file)
2431 actual_md5 = apt_pkg.md5sum(old_file_fh)
2433 actual_size = os.stat(old_file)[stat.ST_SIZE]
2435 suite_type = x.location.archive_type
2436 # need this for updating dsc_files in install()
2437 dsc_entry["files id"] = x.file_id
2438 # See install() in process-accepted...
2439 if not orig_files.has_key(dsc_name):
2440 orig_files[dsc_name] = {}
2441 orig_files[dsc_name]["id"] = x.file_id
2442 orig_files[dsc_name]["path"] = old_file
2443 orig_files[dsc_name]["location"] = x.location.location_id
2445 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2446 # Not there? Check the queue directories...
2447 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2448 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2450 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2451 if os.path.exists(in_otherdir):
2452 in_otherdir_fh = utils.open_file(in_otherdir)
2453 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2454 in_otherdir_fh.close()
2455 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2457 if not orig_files.has_key(dsc_name):
2458 orig_files[dsc_name] = {}
2459 orig_files[dsc_name]["path"] = in_otherdir
2462 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
2465 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
2467 if actual_md5 != dsc_entry["md5sum"]:
2468 self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
2469 if actual_size != int(dsc_entry["size"]):
2470 self.rejects.append("size for %s doesn't match %s." % (found, file))
2472 ################################################################################
2473 # This is used by process-new and process-holding to recheck a changes file
2474 # at the time we're running. It mainly wraps various other internal functions
2475 # and is similar to accepted_checks - these should probably be tidied up
2477 def recheck(self, session):
2479 for f in self.pkg.files.keys():
2480 # The .orig.tar.gz can disappear out from under us if it's a
2481 # duplicate of one in the archive.
2482 if not self.pkg.files.has_key(f):
2485 entry = self.pkg.files[f]
2487 # Check that the source still exists
2488 if entry["type"] == "deb":
2489 source_version = entry["source version"]
2490 source_package = entry["source package"]
2491 if not self.pkg.changes["architecture"].has_key("source") \
2492 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2493 source_epochless_version = re_no_epoch.sub('', source_version)
2494 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2496 for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
2497 if cnf.has_key("Dir::Queue::%s" % (q)):
2498 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2501 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2503 # Version and file overwrite checks
2504 if entry["type"] == "deb":
2505 self.check_binary_against_db(f, session)
2506 elif entry["type"] == "dsc":
2507 self.check_source_against_db(f, session)
2508 self.check_dsc_against_db(f, session)
2510 ################################################################################
2511 def accepted_checks(self, overwrite_checks, session):
2512 # Recheck anything that relies on the database, since that's not
2513 # frozen between accept and our run time when called from p-a.
2515 # overwrite_checks is set to False when installing to stable/oldstable
2520 # Find the .dsc (again)
2522 for f in self.pkg.files.keys():
2523 if self.pkg.files[f]["type"] == "dsc":
2526 for checkfile in self.pkg.files.keys():
2528 # The .orig.tar.gz can disappear out from under us if it's a
2528 # duplicate of one in the archive.
2529 if not self.pkg.files.has_key(checkfile):
2532 entry = self.pkg.files[checkfile]
2534 # Check that the source still exists
2535 if entry["type"] == "deb":
2536 source_version = entry["source version"]
2537 source_package = entry["source package"]
2538 if not self.pkg.changes["architecture"].has_key("source") \
2539 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2540 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2542 # Version and file overwrite checks
2543 if overwrite_checks:
2544 if entry["type"] == "deb":
2545 self.check_binary_against_db(checkfile, session)
2546 elif entry["type"] == "dsc":
2547 self.check_source_against_db(checkfile, session)
2548 self.check_dsc_against_db(dsc_filename, session)
2550 # propagate in the case it is in the override tables:
2551 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2552 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2553 propogate[suite] = 1
2555 nopropogate[suite] = 1
2557 for suite in propogate.keys():
2558 if suite in nopropogate:
2560 self.pkg.changes["distribution"][suite] = 1
2562 for checkfile in self.pkg.files.keys():
2563 # Check the package is still in the override tables
2564 for suite in self.pkg.changes["distribution"].keys():
2565 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2566 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2568 ################################################################################
2569 # This is not really a reject, but an unaccept, but since a) the code for
2570 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2571 # extremely rare, for now we'll go with whining at our admin folks...
2573 def do_unaccept(self):
2577 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2578 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2579 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2580 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2581 if cnf.has_key("Dinstall::Bcc"):
2582 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2584 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2586 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2588 # Write the rejection email out as the <foo>.reason file
2589 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2590 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2592 # If we fail here someone is probably trying to exploit the race
2593 # so let's just raise an exception ...
2594 if os.path.exists(reject_filename):
2595 os.unlink(reject_filename)
2597 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2598 os.write(fd, reject_mail_message)
2601 utils.send_mail(reject_mail_message)
2603 del self.Subst["__REJECTOR_ADDRESS__"]
2604 del self.Subst["__REJECT_MESSAGE__"]
2605 del self.Subst["__CC__"]
2607 ################################################################################
2608 # If any file of an upload has a recent mtime then chances are good
2609 # the file is still being uploaded.
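# Anything modified less than Dinstall::SkipTime seconds ago is treated as still
# in flight, and the upload is then left for a later run.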
2611 def upload_too_new(self):
2614 # Move back to the original directory to get accurate time stamps
2616 os.chdir(self.pkg.directory)
2617 file_list = self.pkg.files.keys()
2618 file_list.extend(self.pkg.dsc_files.keys())
2619 file_list.append(self.pkg.changes_file)
2622 last_modified = time.time()-os.path.getmtime(f)
2623 if last_modified < int(cnf["Dinstall::SkipTime"]):