5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
45 from dak_exceptions import *
48 from config import Config
49 from holding import Holding
51 from summarystats import SummaryStats
52 from utils import parse_changes, check_dsc_files
53 from textutils import fix_maintainer
54 from binary import Binary
56 ###############################################################################
58 def get_type(f, session):
60 Get the file type of C{f}
63 @param f: file entry from Changes object
65 @type session: SQLA Session
66 @param session: SQL Alchemy session object
73 if f.has_key("dbtype"):
74 file_type = f["dbtype"]
75 elif re_source_ext.match(f["type"]):
78 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
80 # Validate the override type
81 type_id = get_override_type(file_type, session)
83 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
87 ################################################################################
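# Illustrative sketch (not the dak implementation): the type-resolution order
# described above -- an explicit "dbtype" entry wins, otherwise a source-style
# extension means "dsc".  The extension regex below is an assumption standing
# in for re_source_ext.
import re

_SOURCE_EXT_EXAMPLE = re.compile(r"(orig\.tar\.gz|tar\.gz|diff\.gz|dsc)$")

def _resolve_file_type_example(file_entry):
    """Return the override type for a hypothetical Changes file entry."""
    if "dbtype" in file_entry:
        return file_entry["dbtype"]
    if _SOURCE_EXT_EXAMPLE.search(file_entry.get("type", "")):
        return "dsc"
    raise ValueError("unknown file type: %r" % (file_entry,))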
89 # Determine what parts in a .changes are NEW
91 def determine_new(changes, files, warn=1):
93 Determine what parts in a C{changes} file are NEW.
95 @type changes: Upload.Pkg.changes dict
96 @param changes: Changes dictionary
98 @type files: Upload.Pkg.files dict
99 @param files: Files dictionary
102 @param warn: Warn if overrides are added for (old)stable
105 @return: dictionary of NEW components.
110 session = DBConn().session()
112 # Build up a list of potentially new things
113 for name, f in files.items():
114 # Skip byhand elements
115 if f["type"] == "byhand":
118 priority = f["priority"]
119 section = f["section"]
120 file_type = get_type(f, session)
121 component = f["component"]
123 if file_type == "dsc":
126 if not new.has_key(pkg):
128 new[pkg]["priority"] = priority
129 new[pkg]["section"] = section
130 new[pkg]["type"] = file_type
131 new[pkg]["component"] = component
132 new[pkg]["files"] = []
134 old_type = new[pkg]["type"]
135 if old_type != file_type:
136 # source gets trumped by deb or udeb
137 if old_type == "dsc":
138 new[pkg]["priority"] = priority
139 new[pkg]["section"] = section
140 new[pkg]["type"] = file_type
141 new[pkg]["component"] = component
143 new[pkg]["files"].append(name)
145 if f.has_key("othercomponents"):
146 new[pkg]["othercomponents"] = f["othercomponents"]
148 for suite in changes["suite"].keys():
149 for pkg in new.keys():
150 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
152 for file_entry in new[pkg]["files"]:
153 if files[file_entry].has_key("new"):
154 del files[file_entry]["new"]
158 for s in ['stable', 'oldstable']:
159 if changes["suite"].has_key(s):
160 print "WARNING: overrides will be added for %s!" % s
161 for pkg in new.keys():
162 if new[pkg].has_key("othercomponents"):
163 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
169 ################################################################################
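# For reference, a sketch of the per-package dictionary determine_new() builds
# before overrides are looked up.  The keys come from the code above; the
# values are made-up examples.
_EXAMPLE_NEW = {
    "hello": {
        "priority": "optional",
        "section": "devel",
        "type": "deb",
        "component": "main",
        "files": ["hello_2.10-1_amd64.deb"],
        # only present when the package already exists in another component
        "othercomponents": "contrib",
    },
}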
171 def check_valid(new):
173 Check if section and priority for NEW packages exist in database.
174 Additionally does sanity checks:
175 - debian-installer packages have to be udeb (or source)
176 - non debian-installer packages can not be udeb
177 - source priority can only be assigned to dsc file types
180 @param new: Dict of new packages with their section, priority and type.
183 for pkg in new.keys():
184 section_name = new[pkg]["section"]
185 priority_name = new[pkg]["priority"]
186 file_type = new[pkg]["type"]
188 section = get_section(section_name)
190 new[pkg]["section id"] = -1
192 new[pkg]["section id"] = section.section_id
194 priority = get_priority(priority_name)
196 new[pkg]["priority id"] = -1
198 new[pkg]["priority id"] = priority.priority_id
201 di = section_name.find("debian-installer") != -1
203 # If d-i, we must be udeb and vice-versa
204 if (di and file_type not in ("udeb", "dsc")) or \
205 (not di and file_type == "udeb"):
206 new[pkg]["section id"] = -1
208 # If dsc we need to be source and vice-versa
209 if (priority_name == "source" and file_type != "dsc") or \
210 (priority_name != "source" and file_type == "dsc"):
211 new[pkg]["priority id"] = -1
213 ###############################################################################
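# Illustrative sketch (not dak code) of the two sanity rules enforced by
# check_valid() above: debian-installer sections must carry udeb (or dsc)
# types, and the "source" priority pairs exactly with .dsc entries.
def _section_priority_sane_example(section, priority, file_type):
    di = "debian-installer" in section
    if (di and file_type not in ("udeb", "dsc")) or (not di and file_type == "udeb"):
        return False
    if (priority == "source") != (file_type == "dsc"):
        return False
    return True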
215 def check_status(files):
217 for f in files.keys():
218 if files[f]["type"] == "byhand":
220 elif files[f].has_key("new"):
224 ###############################################################################
226 # Used by Upload.check_timestamps
227 class TarTime(object):
228 def __init__(self, future_cutoff, past_cutoff):
230 self.future_cutoff = future_cutoff
231 self.past_cutoff = past_cutoff
234 self.future_files = {}
235 self.ancient_files = {}
237 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
238 if MTime > self.future_cutoff:
239 self.future_files[Name] = MTime
240 if MTime < self.past_cutoff:
241 self.ancient_files[Name] = MTime
243 ###############################################################################
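# Illustrative sketch: the same cutoff bookkeeping TarTime does, but driven by
# the standard library's tarfile module instead of apt_inst.debExtract (which
# is what Upload.check_timestamps() below actually uses).
import tarfile

def _scan_tar_timestamps_example(path, future_cutoff, past_cutoff):
    future, ancient = {}, {}
    tar = tarfile.open(path)
    try:
        for member in tar:
            if member.mtime > future_cutoff:
                future[member.name] = member.mtime
            if member.mtime < past_cutoff:
                ancient[member.name] = member.mtime
    finally:
        tar.close()
    return future, ancient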
245 class Upload(object):
247 Everything that has to do with processing an upload.
255 ###########################################################################
258 """ Reset a number of internal variables."""
260 # Initialize the substitution template map
263 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
264 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
265 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
266 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
274 def package_info(self):
276 Format various messages from this Upload to send to the maintainer.
280 ('Reject Reasons', self.rejects),
281 ('Warnings', self.warnings),
282 ('Notes', self.notes),
286 for title, messages in msgs:
288 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
292 ###########################################################################
293 def update_subst(self):
294 """ Set up the per-package template substitution mappings """
298 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
299 if not self.pkg.changes.has_key("architecture") or not \
300 isinstance(self.pkg.changes["architecture"], dict):
301 self.pkg.changes["architecture"] = { "Unknown" : "" }
303 # and maintainer2047 may not exist.
304 if not self.pkg.changes.has_key("maintainer2047"):
305 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
307 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
308 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
309 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
311 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
312 if self.pkg.changes["architecture"].has_key("source") and \
313 self.pkg.changes["changedby822"] != "" and \
314 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
316 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
317 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
318 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
320 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
321 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
322 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
324 if "sponsoremail" in self.pkg.changes:
325 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
327 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
328 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
330 # Apply any global override of the Maintainer field
331 if cnf.get("Dinstall::OverrideMaintainer"):
332 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
333 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
335 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
336 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
337 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
339 ###########################################################################
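# Minimal sketch of how the __KEY__ substitution map built by update_subst()
# is typically applied to a mail template (dak uses utils.TemplateSubst for
# this; the template text below is made up).
def _apply_subst_example(subst, template):
    for key, value in subst.items():
        template = template.replace(key, str(value))
    return template

# _apply_subst_example({"__SOURCE__": "hello", "__VERSION__": "2.10-1"},
#                      "Accepted __SOURCE__ __VERSION__")
# -> "Accepted hello 2.10-1"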
340 def load_changes(self, filename):
343 @return: whether the changes file was valid or not. We may want to
344 reject even if this is True (see what gets put in self.rejects).
345 This is simply to prevent us from trying things later which will
346 fail because we couldn't properly parse the file.
349 self.pkg.changes_file = filename
351 # Parse the .changes file into a dictionary
353 self.pkg.changes.update(parse_changes(filename))
354 except CantOpenError:
355 self.rejects.append("%s: can't read file." % (filename))
357 except ParseChangesError, line:
358 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
360 except ChangesUnicodeError:
361 self.rejects.append("%s: changes file not proper utf-8" % (filename))
364 # Parse the Files field from the .changes into another dictionary
366 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
367 except ParseChangesError, line:
368 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
370 except UnknownFormatError, format:
371 self.rejects.append("%s: unknown format '%s'." % (filename, format))
374 # Check for mandatory fields
375 for i in ("distribution", "source", "binary", "architecture",
376 "version", "maintainer", "files", "changes", "description"):
377 if not self.pkg.changes.has_key(i):
378 # Avoid undefined errors later
379 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
382 # Strip a source version in brackets from the source field
383 if re_strip_srcver.search(self.pkg.changes["source"]):
384 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
386 # Ensure the source field is a valid package name.
387 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
388 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
390 # Split multi-value fields into a lower-level dictionary
391 for i in ("architecture", "distribution", "binary", "closes"):
392 o = self.pkg.changes.get(i, "")
394 del self.pkg.changes[i]
396 self.pkg.changes[i] = {}
399 self.pkg.changes[i][j] = 1
401 # Fix the Maintainer: field to be RFC822/2047 compatible
403 (self.pkg.changes["maintainer822"],
404 self.pkg.changes["maintainer2047"],
405 self.pkg.changes["maintainername"],
406 self.pkg.changes["maintaineremail"]) = \
407 fix_maintainer (self.pkg.changes["maintainer"])
408 except ParseMaintError, msg:
409 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
410 % (filename, self.pkg.changes["maintainer"], msg))
412 # ...likewise for the Changed-By: field if it exists.
414 (self.pkg.changes["changedby822"],
415 self.pkg.changes["changedby2047"],
416 self.pkg.changes["changedbyname"],
417 self.pkg.changes["changedbyemail"]) = \
418 fix_maintainer (self.pkg.changes.get("changed-by", ""))
419 except ParseMaintError, msg:
420 self.pkg.changes["changedby822"] = ""
421 self.pkg.changes["changedby2047"] = ""
422 self.pkg.changes["changedbyname"] = ""
423 self.pkg.changes["changedbyemail"] = ""
425 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
426 % (filename, self.pkg.changes.get("changed-by", ""), msg))
428 # Ensure all the values in Closes: are numbers
429 if self.pkg.changes.has_key("closes"):
430 for i in self.pkg.changes["closes"].keys():
431 if re_isanum.match (i) == None:
432 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
434 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
435 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
436 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
438 # Check there isn't already a changes file of the same name in one
439 # of the queue directories.
440 base_filename = os.path.basename(filename)
441 if get_knownchange(base_filename):
442 self.rejects.append("%s: a file with this name already exists." % (base_filename))
444 # Check the .changes is non-empty
445 if not self.pkg.files:
446 self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
449 # Changes was syntactically valid even if we'll reject
452 ###########################################################################
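# Sketch of the multi-value field handling in load_changes() above: a space
# separated field such as "Architecture: source amd64" becomes a dictionary
# keyed by each value, which is what the has_key()/keys() calls throughout
# this module rely on.
def _split_field_example(value):
    result = {}
    for item in value.split():
        result[item] = 1
    return result

# _split_field_example("source amd64") -> {"source": 1, "amd64": 1}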
454 def check_distributions(self):
455 "Check and map the Distribution field"
459 # Handle suite mappings
460 for m in Cnf.ValueList("SuiteMappings"):
463 if mtype == "map" or mtype == "silent-map":
464 (source, dest) = args[1:3]
465 if self.pkg.changes["distribution"].has_key(source):
466 del self.pkg.changes["distribution"][source]
467 self.pkg.changes["distribution"][dest] = 1
468 if mtype != "silent-map":
469 self.notes.append("Mapping %s to %s." % (source, dest))
470 if self.pkg.changes.has_key("distribution-version"):
471 if self.pkg.changes["distribution-version"].has_key(source):
472 self.pkg.changes["distribution-version"][source]=dest
473 elif mtype == "map-unreleased":
474 (source, dest) = args[1:3]
475 if self.pkg.changes["distribution"].has_key(source):
476 for arch in self.pkg.changes["architecture"].keys():
477 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
478 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
479 del self.pkg.changes["distribution"][source]
480 self.pkg.changes["distribution"][dest] = 1
482 elif mtype == "ignore":
484 if self.pkg.changes["distribution"].has_key(suite):
485 del self.pkg.changes["distribution"][suite]
486 self.warnings.append("Ignoring %s as a target suite." % (suite))
487 elif mtype == "reject":
489 if self.pkg.changes["distribution"].has_key(suite):
490 self.rejects.append("Uploads to %s are not accepted." % (suite))
491 elif mtype == "propup-version":
492 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
494 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
495 if self.pkg.changes["distribution"].has_key(args[1]):
496 self.pkg.changes.setdefault("distribution-version", {})
497 for suite in args[2:]:
498 self.pkg.changes["distribution-version"][suite] = suite
500 # Ensure there is (still) a target distribution
501 if len(self.pkg.changes["distribution"].keys()) < 1:
502 self.rejects.append("No valid distribution remaining.")
504 # Ensure target distributions exist
505 for suite in self.pkg.changes["distribution"].keys():
506 if not Cnf.has_key("Suite::%s" % (suite)):
507 self.rejects.append("Unknown distribution `%s'." % (suite))
509 ###########################################################################
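# Sketch of how one "map"-style SuiteMappings entry is interpreted by
# check_distributions() above.  The entry string is a made-up example in the
# same "<type> <args...>" form the code splits apart.
def _apply_map_entry_example(entry, distribution):
    args = entry.split()
    mtype = args[0]
    if mtype in ("map", "silent-map"):
        source, dest = args[1:3]
        if source in distribution:
            del distribution[source]
            distribution[dest] = 1
    return distribution

# _apply_map_entry_example("map stable proposed-updates", {"stable": 1})
# -> {"proposed-updates": 1}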
511 def binary_file_checks(self, f, session):
513 entry = self.pkg.files[f]
515 # Extract package control information
516 deb_file = utils.open_file(f)
518 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
520 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
522 # Can't continue, none of the checks on control would work.
525 # Check for mandatory "Description:"
528 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
530 self.rejects.append("%s: Missing Description in binary package" % (f))
535 # Check for mandatory fields
536 for field in [ "Package", "Architecture", "Version" ]:
537 if control.Find(field) == None:
539 self.rejects.append("%s: No %s field in control." % (f, field))
542 # Ensure the package name matches the one given in the .changes
543 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
544 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
546 # Validate the package field
547 package = control.Find("Package")
548 if not re_valid_pkg_name.match(package):
549 self.rejects.append("%s: invalid package name '%s'." % (f, package))
551 # Validate the version field
552 version = control.Find("Version")
553 if not re_valid_version.match(version):
554 self.rejects.append("%s: invalid version number '%s'." % (f, version))
556 # Ensure the architecture of the .deb is one we know about.
557 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
558 architecture = control.Find("Architecture")
559 upload_suite = self.pkg.changes["distribution"].keys()[0]
561 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
562 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
563 self.rejects.append("Unknown architecture '%s'." % (architecture))
565 # Ensure the architecture of the .deb is one of the ones
566 # listed in the .changes.
567 if not self.pkg.changes["architecture"].has_key(architecture):
568 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
570 # Sanity-check the Depends field
571 depends = control.Find("Depends")
573 self.rejects.append("%s: Depends field is empty." % (f))
575 # Sanity-check the Provides field
576 provides = control.Find("Provides")
578 provide = re_spacestrip.sub('', provides)
580 self.rejects.append("%s: Provides field is empty." % (f))
581 prov_list = provide.split(",")
582 for prov in prov_list:
583 if not re_valid_pkg_name.match(prov):
584 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
586 # Check the section & priority match those given in the .changes (non-fatal)
587 if control.Find("Section") and entry["section"] != "" \
588 and entry["section"] != control.Find("Section"):
589 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
590 (f, control.Find("Section", ""), entry["section"]))
591 if control.Find("Priority") and entry["priority"] != "" \
592 and entry["priority"] != control.Find("Priority"):
593 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
594 (f, control.Find("Priority", ""), entry["priority"]))
596 entry["package"] = package
597 entry["architecture"] = architecture
598 entry["version"] = version
599 entry["maintainer"] = control.Find("Maintainer", "")
601 if f.endswith(".udeb"):
602 self.pkg.files[f]["dbtype"] = "udeb"
603 elif f.endswith(".deb"):
604 self.pkg.files[f]["dbtype"] = "deb"
606 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
608 entry["source"] = control.Find("Source", entry["package"])
610 # Get the source version
611 source = entry["source"]
614 if source.find("(") != -1:
615 m = re_extract_src_version.match(source)
617 source_version = m.group(2)
619 if not source_version:
620 source_version = self.pkg.files[f]["version"]
622 entry["source package"] = source
623 entry["source version"] = source_version
625 # Ensure the filename matches the contents of the .deb
626 m = re_isadeb.match(f)
629 file_package = m.group(1)
630 if entry["package"] != file_package:
631 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
632 (f, file_package, entry["dbtype"], entry["package"]))
633 epochless_version = re_no_epoch.sub('', control.Find("Version"))
636 file_version = m.group(2)
637 if epochless_version != file_version:
638 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
639 (f, file_version, entry["dbtype"], epochless_version))
642 file_architecture = m.group(3)
643 if entry["architecture"] != file_architecture:
644 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
645 (f, file_architecture, entry["dbtype"], entry["architecture"]))
647 # Check for existing source
648 source_version = entry["source version"]
649 source_package = entry["source package"]
650 if self.pkg.changes["architecture"].has_key("source"):
651 if source_version != self.pkg.changes["version"]:
652 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
653 (source_version, f, self.pkg.changes["version"]))
655 # Check in the SQL database
656 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
657 # Check in one of the other directories
658 source_epochless_version = re_no_epoch.sub('', source_version)
659 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
660 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
662 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
665 dsc_file_exists = False
666 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
667 if cnf.has_key("Dir::Queue::%s" % (myq)):
668 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
669 dsc_file_exists = True
672 if not dsc_file_exists:
673 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
675 # Check the version and check for file overwrites
676 self.check_binary_against_db(f, session)
678 # Temporarily disable contents generation until we change the table storage layout
681 #if len(b.rejects) > 0:
682 # for j in b.rejects:
683 # self.rejects.append(j)
685 def source_file_checks(self, f, session):
686 entry = self.pkg.files[f]
688 m = re_issource.match(f)
692 entry["package"] = m.group(1)
693 entry["version"] = m.group(2)
694 entry["type"] = m.group(3)
696 # Ensure the source package name matches the Source field in the .changes
697 if self.pkg.changes["source"] != entry["package"]:
698 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
700 # Ensure the source version matches the version in the .changes file
701 if re_is_orig_source.match(f):
702 changes_version = self.pkg.changes["chopversion2"]
704 changes_version = self.pkg.changes["chopversion"]
706 if changes_version != entry["version"]:
707 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
709 # Ensure the .changes lists source in the Architecture field
710 if not self.pkg.changes["architecture"].has_key("source"):
711 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
713 # Check the signature of a .dsc file
714 if entry["type"] == "dsc":
715 # check_signature returns either:
716 # (None, [list, of, rejects]) or (signature, [])
717 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
719 self.rejects.append(j)
721 entry["architecture"] = "source"
723 def per_suite_file_checks(self, f, suite, session):
725 entry = self.pkg.files[f]
726 archive = utils.where_am_i()
729 if entry.has_key("byhand"):
732 # Check we have fields we need to do these checks
734 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
735 if not entry.has_key(m):
736 self.rejects.append("file '%s' does not have field %s set" % (f, m))
742 # Handle component mappings
743 for m in cnf.ValueList("ComponentMappings"):
744 (source, dest) = m.split()
745 if entry["component"] == source:
746 entry["original component"] = source
747 entry["component"] = dest
749 # Ensure the component is valid for the target suite
750 if cnf.has_key("Suite:%s::Components" % (suite)) and \
751 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
752 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
755 # Validate the component
756 if not get_component(entry["component"], session):
757 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
760 # See if the package is NEW
761 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
764 # Validate the priority
765 if entry["priority"].find('/') != -1:
766 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
768 # Determine the location
769 location = cnf["Dir::Pool"]
770 l = get_location(location, entry["component"], archive, session)
772 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
773 entry["location id"] = -1
775 entry["location id"] = l.location_id
777 # Check the md5sum & size against existing files (if any)
778 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
780 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
781 entry["size"], entry["md5sum"], entry["location id"])
784 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
785 elif found is False and poolfile is not None:
786 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
789 entry["files id"] = None
791 entry["files id"] = poolfile.file_id
793 # Check for packages that have moved from one component to another
794 entry['suite'] = suite
795 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
797 entry["othercomponents"] = res.fetchone()[0]
799 def check_files(self, action=True):
800 file_keys = self.pkg.files.keys()
806 os.chdir(self.pkg.directory)
808 ret = holding.copy_to_holding(f)
810 # XXX: Should we bail out here or try and continue?
811 self.rejects.append(ret)
815 # Check there isn't already a .changes file of the same name in
816 # the proposed-updates "CopyChanges" storage directories.
817 # [NB: this check must be done post-suite mapping]
818 base_filename = os.path.basename(self.pkg.changes_file)
820 for suite in self.pkg.changes["distribution"].keys():
821 copychanges = "Suite::%s::CopyChanges" % (suite)
822 if cnf.has_key(copychanges) and \
823 os.path.exists(os.path.join(cnf[copychanges], base_filename)):
824 self.rejects.append("%s: a file with this name already exists in %s" \
825 % (base_filename, cnf[copychanges]))
830 session = DBConn().session()
832 for f, entry in self.pkg.files.items():
833 # Ensure the file does not already exist in one of the accepted directories
834 for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
835 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
836 if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
837 self.rejects.append("%s file already exists in the %s directory." % (f, d))
839 if not re_taint_free.match(f):
840 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
842 # Check the file is readable
843 if os.access(f, os.R_OK) == 0:
844 # When running in -n, copy_to_holding() won't have
845 # generated the reject_message, so we need to.
847 if os.path.exists(f):
848 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
850 self.rejects.append("Can't read `%s'. [file not found]" % (f))
851 entry["type"] = "unreadable"
854 # If it's byhand skip remaining checks
855 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
857 entry["type"] = "byhand"
859 # Checks for a binary package...
860 elif re_isadeb.match(f):
862 entry["type"] = "deb"
864 # This routine appends to self.rejects/warnings as appropriate
865 self.binary_file_checks(f, session)
867 # Checks for a source package...
868 elif re_issource.match(f):
871 # This routine appends to self.rejects/warnings as appropriate
872 self.source_file_checks(f, session)
874 # Not a binary or source package? Assume byhand...
877 entry["type"] = "byhand"
879 # Per-suite file checks
880 entry["oldfiles"] = {}
881 for suite in self.pkg.changes["distribution"].keys():
882 self.per_suite_file_checks(f, suite, session)
886 # If the .changes file says it has source, it must have source.
887 if self.pkg.changes["architecture"].has_key("source"):
889 self.rejects.append("no source found and Architecture line in changes mention source.")
891 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
892 self.rejects.append("source only uploads are not supported.")
894 ###########################################################################
895 def check_dsc(self, action=True, session=None):
896 """Returns bool indicating whether or not the source changes are valid"""
897 # Ensure there is source to check
898 if not self.pkg.changes["architecture"].has_key("source"):
903 for f, entry in self.pkg.files.items():
904 if entry["type"] == "dsc":
906 self.rejects.append("can not process a .changes file with multiple .dsc's.")
911 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
913 self.rejects.append("source uploads must contain a dsc file")
916 # Parse the .dsc file
918 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
919 except CantOpenError:
920 # if not -n copy_to_holding() will have done this for us...
922 self.rejects.append("%s: can't read file." % (dsc_filename))
923 except ParseChangesError, line:
924 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
925 except InvalidDscError, line:
926 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
927 except ChangesUnicodeError:
928 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
930 # Build up the file list of files mentioned by the .dsc
932 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
933 except NoFilesFieldError:
934 self.rejects.append("%s: no Files: field." % (dsc_filename))
936 except UnknownFormatError, format:
937 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
939 except ParseChangesError, line:
940 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
943 # Enforce mandatory fields
944 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
945 if not self.pkg.dsc.has_key(i):
946 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
949 # Validate the source and version fields
950 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
951 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
952 if not re_valid_version.match(self.pkg.dsc["version"]):
953 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
955 # Only a limited list of source formats are allowed in each suite
956 for dist in self.pkg.changes["distribution"].keys():
957 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
958 if self.pkg.dsc["format"] not in allowed:
959 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
961 # Validate the Maintainer field
963 # We ignore the return value
964 fix_maintainer(self.pkg.dsc["maintainer"])
965 except ParseMaintError, msg:
966 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
967 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
969 # Validate the build-depends field(s)
970 for field_name in [ "build-depends", "build-depends-indep" ]:
971 field = self.pkg.dsc.get(field_name)
973 # Have apt try to parse them...
975 apt_pkg.ParseSrcDepends(field)
977 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
979 # Ensure the version number in the .dsc matches the version number in the .changes
980 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
981 changes_version = self.pkg.files[dsc_filename]["version"]
983 if epochless_dsc_version != changes_version:
984 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
986 # Ensure the Files field contain only what's expected
987 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
989 # Ensure source is newer than existing source in target suites
990 session = DBConn().session()
991 self.check_source_against_db(dsc_filename, session)
992 self.check_dsc_against_db(dsc_filename, session)
997 ###########################################################################
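# Sketch of the epoch-stripping comparison done in check_dsc() above:
# re_no_epoch removes a leading "<digits>:" epoch so the .dsc version can be
# compared with the filename-derived version from the .changes.  The regex
# below is an assumption standing in for dak's re_no_epoch.
import re

def _epochless_example(version):
    return re.sub(r"^\d+:", "", version)

# _epochless_example("1:2.10-1") -> "2.10-1"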
999 def get_changelog_versions(self, source_dir):
1000 """Extracts a the source package and (optionally) grabs the
1001 version history out of debian/changelog for the BTS."""
1005 # Find the .dsc (again)
1007 for f in self.pkg.files.keys():
1008 if self.pkg.files[f]["type"] == "dsc":
1011 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1012 if not dsc_filename:
1015 # Create a symlink mirror of the source files in our temporary directory
1016 for f in self.pkg.files.keys():
1017 m = re_issource.match(f)
1019 src = os.path.join(source_dir, f)
1020 # If a file is missing for whatever reason, give up.
1021 if not os.path.exists(src):
1024 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1025 self.pkg.orig_files[f].has_key("path"):
1027 dest = os.path.join(os.getcwd(), f)
1028 os.symlink(src, dest)
1030 # If the orig files are not a part of the upload, create symlinks to the
1032 for orig_file in self.pkg.orig_files.keys():
1033 if not self.pkg.orig_files[orig_file].has_key("path"):
1035 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1036 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1038 # Extract the source
1039 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1040 (result, output) = commands.getstatusoutput(cmd)
1042 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1043 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1046 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1049 # Get the upstream version
1050 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1051 if re_strip_revision.search(upstr_version):
1052 upstr_version = re_strip_revision.sub('', upstr_version)
1054 # Ensure the changelog file exists
1055 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1056 if not os.path.exists(changelog_filename):
1057 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1060 # Parse the changelog
1061 self.pkg.dsc["bts changelog"] = ""
1062 changelog_file = utils.open_file(changelog_filename)
1063 for line in changelog_file.readlines():
1064 m = re_changelog_versions.match(line)
1066 self.pkg.dsc["bts changelog"] += line
1067 changelog_file.close()
1069 # Check we found at least one revision in the changelog
1070 if not self.pkg.dsc["bts changelog"]:
1071 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1073 def check_source(self):
1075 # a) there's no source
1076 # or b) the orig files are MIA
1077 if not self.pkg.changes["architecture"].has_key("source") \
1078 or len(self.pkg.orig_files) == 0:
1081 tmpdir = utils.temp_dirname()
1083 # Move into the temporary directory
1087 # Get the changelog version history
1088 self.get_changelog_versions(cwd)
1090 # Move back and cleanup the temporary tree
1094 shutil.rmtree(tmpdir)
1096 if e.errno != errno.EACCES:
1098 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1100 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1101 # We probably have u-r or u-w directories so chmod everything
1103 cmd = "chmod -R u+rwx %s" % (tmpdir)
1104 result = os.system(cmd)
1106 utils.fubar("'%s' failed with result %s." % (cmd, result))
1107 shutil.rmtree(tmpdir)
1108 except Exception, e:
1109 print "foobar2 (%s)" % e
1110 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1112 ###########################################################################
1113 def ensure_hashes(self):
1114 # Make sure we recognise the format of the Files: field in the .changes
1115 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1116 if len(format) == 2:
1117 format = int(format[0]), int(format[1])
1119 format = int(float(format[0])), 0
1121 # We need to deal with the original changes blob, as the fields we need
1122 # might not be in the changes dict serialised into the .dak anymore.
1123 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1125 # Copy the checksums over to the current changes dict. This will keep
1126 # the existing modifications to it intact.
1127 for field in orig_changes:
1128 if field.startswith('checksums-'):
1129 self.pkg.changes[field] = orig_changes[field]
1131 # Check for unsupported hashes
1132 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1133 self.rejects.append(j)
1135 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1136 self.rejects.append(j)
1138 # We have to calculate the hash ourselves if the changes format predates the
1139 # version the hash appeared in, rather than requiring it to be in the changes file
1140 for hashname, hashfunc, version in utils.known_hashes:
1141 # TODO: Move _ensure_changes_hash into this class
1142 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1143 self.rejects.append(j)
1144 if "source" in self.pkg.changes["architecture"]:
1145 # TODO: Move _ensure_dsc_hash into this class
1146 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1147 self.rejects.append(j)
1149 def check_hashes(self):
1150 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1151 self.rejects.append(m)
1153 for m in utils.check_size(".changes", self.pkg.files):
1154 self.rejects.append(m)
1156 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1157 self.rejects.append(m)
1159 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1160 self.rejects.append(m)
1162 self.ensure_hashes()
1164 ###########################################################################
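# Sketch of what one check_hash()/check_size() pass amounts to: compare a
# file's md5sum and size on disk against what the .changes claimed.
# utils.check_hash/check_size do this generically for several hash types.
import hashlib
import os

def _verify_entry_example(filename, expected_md5, expected_size):
    rejects = []
    f = open(filename, "rb")
    try:
        actual_md5 = hashlib.md5(f.read()).hexdigest()
    finally:
        f.close()
    if actual_md5 != expected_md5:
        rejects.append("%s: md5sum check failed." % filename)
    if os.path.getsize(filename) != int(expected_size):
        rejects.append("%s: size mismatch." % filename)
    return rejects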
1166 def ensure_orig(self, target_dir='.', session=None):
1168 Ensures that all orig files mentioned in the changes file are present
1169 in target_dir. If they do not exist, they are symlinked into place.
1171 A list containing the symlinks that were created is returned (so they
1178 for filename, entry in self.pkg.dsc_files.iteritems():
1179 if not re_is_orig_source.match(filename):
1180 # File is not an orig; ignore
1183 if os.path.exists(filename):
1184 # File exists, no need to continue
1187 def symlink_if_valid(path):
1188 f = utils.open_file(path)
1189 md5sum = apt_pkg.md5sum(f)
1192 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1193 expected = (int(entry['size']), entry['md5sum'])
1195 if fingerprint != expected:
1198 dest = os.path.join(target_dir, filename)
1200 os.symlink(path, dest)
1201 symlinked.append(dest)
1207 session_ = DBConn().session()
1212 for poolfile in get_poolfile_like_name('/%s' % filename, session_):
1213 poolfile_path = os.path.join(
1214 poolfile.location.path, poolfile.filename
1217 if symlink_if_valid(poolfile_path):
1227 # Look in some other queues for the file
1228 queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
1229 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1231 for queue in queues:
1232 if not cnf.get('Dir::Queue::%s' % queue):
1235 queuefile_path = os.path.join(
1236 cnf['Dir::Queue::%s' % queue], filename
1239 if not os.path.exists(queuefile_path):
1240 # Does not exist in this queue
1243 if symlink_if_valid(queuefile_path):
1248 ###########################################################################
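# Sketch of the fingerprint test used by symlink_if_valid() inside
# ensure_orig() above: a candidate copy of an orig tarball is only symlinked
# into place when both its size and md5sum match what the .dsc declared.
import hashlib
import os

def _symlink_if_valid_example(path, dsc_entry, target_dir="."):
    f = open(path, "rb")
    try:
        md5sum = hashlib.md5(f.read()).hexdigest()
    finally:
        f.close()
    if (os.path.getsize(path), md5sum) != (int(dsc_entry["size"]), dsc_entry["md5sum"]):
        return False
    os.symlink(path, os.path.join(target_dir, os.path.basename(path)))
    return True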
1250 def check_lintian(self):
1253 # Don't reject binary uploads
1254 if not self.pkg.changes['architecture'].has_key('source'):
1257 # Only check some distributions
1259 for dist in ('unstable', 'experimental'):
1260 if dist in self.pkg.changes['distribution']:
1267 tagfile = cnf.get("Dinstall::LintianTags")
1269 # We don't have a tagfile, so just don't do anything.
1272 # Parse the yaml file
1273 sourcefile = file(tagfile, 'r')
1274 sourcecontent = sourcefile.read()
1277 lintiantags = yaml.load(sourcecontent)['lintian']
1278 except yaml.YAMLError, msg:
1279 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1282 # Try and find all orig mentioned in the .dsc
1283 symlinked = self.ensure_orig()
1285 # Now set up the input file for lintian. lintian wants "one tag per line" only,
1286 # so put it together like that. We put all types of tags in one file and then sort
1287 # through lintian's output later to see if it's a fatal tag we detected, or not.
1288 # So we only run lintian once on all tags, even if we might reject on some, but not
1290 # Additionally build up a set of tags
1292 (fd, temp_filename) = utils.temp_filename()
1293 temptagfile = os.fdopen(fd, 'w')
1294 for tagtype in lintiantags:
1295 for tag in lintiantags[tagtype]:
1296 temptagfile.write("%s\n" % tag)
1300 # So now we should look at running lintian at the .changes file, capturing output
1302 command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
1303 (result, output) = commands.getstatusoutput(command)
1305 # We are done with lintian, remove our tempfile and any symlinks we created
1306 os.unlink(temp_filename)
1307 for symlink in symlinked:
1311 utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
1312 utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
1314 if len(output) == 0:
1319 self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
1321 # We have output from lintian, so this package isn't clean. Let's parse it and see
1322 # whether anything in it warrants a reject.
1323 # W: tzdata: binary-without-manpage usr/sbin/tzconfig
1324 for line in output.split('\n'):
1325 m = re_parse_lintian.match(line)
1330 epackage = m.group(2)
1334 # So let's check if we know the tag at all.
1335 if etag not in tags:
1339 # We know it and it is overridden. Check that the override is allowed.
1340 if etag in lintiantags['warning']:
1341 # The tag is overridden, and it is allowed to be overridden.
1342 # Don't add a reject message.
1344 elif etag in lintiantags['error']:
1345 # The tag is overridden - but is not allowed to be
1346 self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
1347 log("ftpmaster does not allow tag to be overridable", etag)
1349 # Tag is known, it is not overridden, direct reject.
1350 self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
1351 # Now tell if they *might* override it.
1352 if etag in lintiantags['warning']:
1353 log("auto rejecting", "overridable", etag)
1354 self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
1356 log("auto rejecting", "not overridable", etag)
1358 ###########################################################################
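# Sketch of parsing one line of lintian output as check_lintian() does above.
# The pattern below is an assumption standing in for dak's re_parse_lintian;
# lintian lines look like "W: tzdata: binary-without-manpage usr/sbin/tzconfig"
# (an "O:" level marks an overridden tag).
import re

_LINTIAN_LINE_EXAMPLE = re.compile(r"^(W|E|O): (\S+): (\S+)(?: (.*))?$")

def _parse_lintian_line_example(line):
    m = _LINTIAN_LINE_EXAMPLE.match(line)
    if not m:
        return None
    etype, epackage, etag, etext = m.groups()
    return etype, epackage, etag, etext or ""

# _parse_lintian_line_example("W: tzdata: binary-without-manpage usr/sbin/tzconfig")
# -> ("W", "tzdata", "binary-without-manpage", "usr/sbin/tzconfig")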
1359 def check_urgency(self):
1361 if self.pkg.changes["architecture"].has_key("source"):
1362 if not self.pkg.changes.has_key("urgency"):
1363 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1364 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1365 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1366 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1367 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1368 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1370 ###########################################################################
1372 # Sanity check the time stamps of files inside debs.
1373 # [Files in the near future cause ugly warnings and extreme time
1374 # travel can cause errors on extraction]
1376 def check_timestamps(self):
1379 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1380 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1381 tar = TarTime(future_cutoff, past_cutoff)
1383 for filename, entry in self.pkg.files.items():
1384 if entry["type"] == "deb":
1387 deb_file = utils.open_file(filename)
1388 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1391 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1392 except SystemError, e:
1393 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1394 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1397 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1401 future_files = tar.future_files.keys()
1403 num_future_files = len(future_files)
1404 future_file = future_files[0]
1405 future_date = tar.future_files[future_file]
1406 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1407 % (filename, num_future_files, future_file, time.ctime(future_date)))
1409 ancient_files = tar.ancient_files.keys()
1411 num_ancient_files = len(ancient_files)
1412 ancient_file = ancient_files[0]
1413 ancient_date = tar.ancient_files[ancient_file]
1414 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1415 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1417 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1419 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1420 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1422 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1428 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1429 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1430 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1431 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1432 self.pkg.changes["sponsoremail"] = uid_email
1437 ###########################################################################
1438 # check_signed_by_key checks
1439 ###########################################################################
1441 def check_signed_by_key(self):
1442 """Ensure the .changes is signed by an authorized uploader."""
1443 session = DBConn().session()
1445 # First of all we check that the person has proper upload permissions
1446 # and that this upload isn't blocked
1447 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1450 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1453 # TODO: Check that import-keyring adds UIDs properly
1455 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1458 # Check that the fingerprint which uploaded has permission to do so
1459 self.check_upload_permissions(fpr, session)
1461 # Check that this package is not in a transition
1462 self.check_transition(session)
1467 def check_upload_permissions(self, fpr, session):
1468 # Check any one-off upload blocks
1469 self.check_upload_blocks(fpr, session)
1471 # Start with DM as a special case
1472 # DM is a special case unfortunately, so we check it first
1473 # (keys with no source access get more access than DMs in one
1474 # way; DMs can only upload for their packages whether source
1475 # or binary, whereas keys with no access might be able to
1476 # upload some binaries)
1477 if fpr.source_acl.access_level == 'dm':
1478 self.check_dm_upload(fpr, session)
1480 # Check source-based permissions for other types
1481 if self.pkg.changes["architecture"].has_key("source"):
1482 if fpr.source_acl.access_level is None:
1483 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1484 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1485 self.rejects.append(rej)
1488 # If not a DM, we allow full upload rights
1489 uid_email = "%s@debian.org" % (fpr.uid.uid)
1490 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1493 # Check binary upload permissions
1494 # By this point we know that DMs can't have got here unless they
1495 # are allowed to deal with the package concerned so just apply
1497 if fpr.binary_acl.access_level == 'full':
1500 # Otherwise we're in the map case
1501 tmparches = self.pkg.changes["architecture"].copy()
1502 tmparches.pop('source', None)
1504 for bam in fpr.binary_acl_map:
1505 tmparches.pop(bam.architecture.arch_string, None)
1507 if len(tmparches.keys()) > 0:
1508 if fpr.binary_reject:
1509 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1510 rej += "\narchitectures involved are: " + ",".join(tmparches.keys())
1511 self.rejects.append(rej)
1513 # TODO: This is where we'll implement reject vs throw away binaries later
1514 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1515 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1516 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1517 self.rejects.append(rej)
1520 def check_upload_blocks(self, fpr, session):
1521 """Check whether any upload blocks apply to this source, source
1522 version, uid / fpr combination"""
1524 def block_rej_template(fb):
1525 rej = 'Manual upload block in place for package %s' % fb.source
1526 if fb.version is not None:
1527 rej += ', version %s' % fb.version
1530 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1531 # version is None if the block applies to all versions
1532 if fb.version is None or fb.version == self.pkg.changes['version']:
1533 # Check both fpr and uid - either is enough to cause a reject
1534 if fb.fpr is not None:
1535 if fb.fpr.fingerprint == fpr.fingerprint:
1536 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1537 if fb.uid is not None:
1538 if fb.uid == fpr.uid:
1539 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1542 def check_dm_upload(self, fpr, session):
1543 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1544 ## none of the uploaded packages are NEW
1546 for f in self.pkg.files.keys():
1547 if self.pkg.files[f].has_key("byhand"):
1548 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1550 if self.pkg.files[f].has_key("new"):
1551 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1557 ## the most recent version of the package uploaded to unstable or
1558 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1559 ## section of its control file
1560 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1561 q = q.join(SrcAssociation)
1562 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1563 q = q.order_by(desc('source.version')).limit(1)
1568 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1569 self.rejects.append(rej)
1573 if not r.dm_upload_allowed:
1574 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1575 self.rejects.append(rej)
1578 ## the Maintainer: field of the uploaded .changes file corresponds with
1579 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1581 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1582 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1584 ## the most recent version of the package uploaded to unstable or
1585 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1586 ## non-developer maintainers cannot NMU or hijack packages)
1588 # srcuploaders includes the maintainer
1590 for sup in r.srcuploaders:
1591 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1592 # Eww - I hope we never have two people with the same name in Debian
1593 if email == fpr.uid.uid or name == fpr.uid.name:
1598 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1601 ## none of the packages are being taken over from other source packages
1602 for b in self.pkg.changes["binary"].keys():
1603 for suite in self.pkg.changes["distribution"].keys():
1604 q = session.query(DBSource)
1605 q = q.join(DBBinary).filter_by(package=b)
1606 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1609 if s.source != self.pkg.changes["source"]:
1610 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1614 def check_transition(self, session):
1617 sourcepkg = self.pkg.changes["source"]
1619 # No sourceful upload -> no need to do anything else, direct return
1620 # We also only deal with unstable uploads, not experimental or those going to some
1621 # proposed-updates queue
1622 if "source" not in self.pkg.changes["architecture"] or \
1623 "unstable" not in self.pkg.changes["distribution"]:
1626 # Also only check if there is a file defined (and existing) with
1628 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1629 if transpath == "" or not os.path.exists(transpath):
1632 # Parse the yaml file
1633 sourcefile = file(transpath, 'r')
1634 sourcecontent = sourcefile.read()
1636 transitions = yaml.load(sourcecontent)
1637 except yaml.YAMLError, msg:
1638 # This shouldn't happen, there is a wrapper to edit the file which
1639 # checks it, but we prefer to be safe rather than end up rejecting
1641 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1644 # Now look through all defined transitions
1645 for trans in transitions:
1646 t = transitions[trans]
1647 source = t["source"]
1650 # Will be None if nothing is in testing.
1651 current = get_source_in_suite(source, "testing", session)
1652 if current is not None:
1653 compare = apt_pkg.VersionCompare(current.version, expected)
1655 if current is None or compare < 0:
1656 # This is still valid: the current version in testing is older than
1657 # the new version we are waiting for, or there is none in testing yet
1659 # Check if the source we look at is affected by this.
1660 if sourcepkg in t['packages']:
1661 # The source is affected, let's reject it.
1663 rejectmsg = "%s: part of the %s transition.\n\n" % (
1666 if current is not None:
1667 currentlymsg = "at version %s" % (current.version)
1669 currentlymsg = "not present in testing"
1671 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1673 rejectmsg += "\n".join(textwrap.wrap("""Your package
1674 is part of a testing transition designed to get %s migrated (it is
1675 currently %s, we need version %s). This transition is managed by the
1676 Release Team, and %s is the Release-Team member responsible for it.
1677 Please mail debian-release@lists.debian.org or contact %s directly if you
1678 need further assistance. You might want to upload to experimental until this
1679 transition is done."""
1680 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1682 self.rejects.append(rejectmsg)
1685 ###########################################################################
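# Sketch of the release-transitions YAML that check_transition() above reads.
# The field names are the ones referenced in the code above; "new" (the
# version being waited for) is an assumption based on that code, and the
# values are made up.
import yaml

_EXAMPLE_TRANSITIONS = yaml.safe_load("""
apt:
  source: apt
  new: 0.7.25
  rm: A Release Team member
  reason: libapt ABI bump
  packages:
    - apt
    - python-apt
""")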
1686 # End check_signed_by_key checks
1687 ###########################################################################
1689 def build_summaries(self):
1690 """ Build a summary of changes the upload introduces. """
1692 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1694 short_summary = summary
1696 # This is for direport's benefit...
1697 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1700 summary += "Changes: " + f
1702 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1704 summary += self.announce(short_summary, 0)
1706 return (summary, short_summary)
1708 ###########################################################################
1710 def close_bugs(self, summary, action):
1712 Send mail to close bugs as instructed by the closes field in the changes file.
1713 Also add a line to summary if any work was done.
1715 @type summary: string
1716 @param summary: summary text, as given by L{build_summaries}
1719 @param action: If set to false, no real action will be done.
1722 @return: summary. If action was taken, extended by the list of closed bugs.
1726 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1728 bugs = self.pkg.changes["closes"].keys()
1734 summary += "Closing bugs: "
1736 summary += "%s " % (bug)
1739 self.Subst["__BUG_NUMBER__"] = bug
1740 if self.pkg.changes["distribution"].has_key("stable"):
1741 self.Subst["__STABLE_WARNING__"] = """
1742 Note that this package is not part of the released stable Debian
1743 distribution. It may have dependencies on other unreleased software,
1744 or other instabilities. Please take care if you wish to install it.
1745 The update will eventually make its way into the next released Debian
1748 self.Subst["__STABLE_WARNING__"] = ""
1749 mail_message = utils.TemplateSubst(self.Subst, template)
1750 utils.send_mail(mail_message)
1752 # Clear up after ourselves
1753 del self.Subst["__BUG_NUMBER__"]
1754 del self.Subst["__STABLE_WARNING__"]
1756 if action and self.logger:
1757 self.logger.log(["closing bugs"] + bugs)
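# Usage sketch: this is normally invoked from announce() below, gated on the
# Dinstall::CloseBugs setting:
#
#     if cnf.FindB("Dinstall::CloseBugs"):
#         summary = self.close_bugs(summary, action)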
1763 ###########################################################################
1765 def announce(self, short_summary, action):
1767 Send an announce mail about a new upload.
1769 @type short_summary: string
1770 @param short_summary: Short summary text to include in the mail
1773 @param action: If set to false, no real action will be done.
1776 @return: Text string describing the action taken.
1781 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1783 # Only do announcements for source uploads with a recent dpkg-dev installed
1784 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1785 self.pkg.changes["architecture"].has_key("source"):
1791 self.Subst["__SHORT_SUMMARY__"] = short_summary
1793 for dist in self.pkg.changes["distribution"].keys():
1794 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1795 if announce_list == "" or lists_done.has_key(announce_list):
1798 lists_done[announce_list] = 1
1799 summary += "Announcing to %s\n" % (announce_list)
1803 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1804 if cnf.get("Dinstall::TrackingServer") and \
1805 self.pkg.changes["architecture"].has_key("source"):
1806 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1807 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1809 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1810 utils.send_mail(mail_message)
1812 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1814 if cnf.FindB("Dinstall::CloseBugs"):
1815 summary = self.close_bugs(summary, action)
1817 del self.Subst["__SHORT_SUMMARY__"]
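# Illustrative dak.conf fragment for the lookups above (values are made up):
#
#     Suite::unstable::Announce "debian-devel-changes@lists.debian.org";
#     Dinstall::TrackingServer "packages.qa.debian.org";
#
# With both set, the announcement goes to the per-suite list and a Bcc of
# <source>@<tracking server> is added for sourceful uploads.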
1821 ###########################################################################
1823 def accept (self, summary, short_summary, session):
1827 This moves all files referenced from the .changes into the pool,
1828 sends the accepted mail, announces to lists, closes bugs and
1829 also checks for override disparities. If enabled, it will write out
1830 the version history for the BTS Version Tracking and will finally call
1833 @type summary: string
1834 @param summary: Summary text
1836 @type short_summary: string
1837 @param short_summary: Short summary
1841 summarystats = SummaryStats()
1844 Logger.log(["installing changes", u.pkg.changes_file])
1846 # Add the .dsc file to the DB first
1847 for newfile, entry in u.pkg.files.items():
1848 if entry["type"] == "dsc":
1849 dsc_component, dsc_location_id = add_dsc_to_db(u, newfile, session)
1851 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1852 for newfile, entry in u.pkg.files.items():
1853 if entry["type"] == "deb":
1854 add_deb_to_db(u, newfile, session)
1856 # If this is a sourceful diff-only upload that is moving
1857 # cross-component, we need to copy the .orig files into the new
1858 # component too for the same reasons as above.
1859 if u.pkg.changes["architecture"].has_key("source"):
1860 for orig_file in u.pkg.orig_files.keys():
1861 if not u.pkg.orig_files[orig_file].has_key("id"):
1862 continue # Skip if it's not in the pool
1863 orig_file_id = u.pkg.orig_files[orig_file]["id"]
1864 if u.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1865 continue # Skip if the location didn't change
1868 oldf = get_poolfile_by_id(orig_file_id, session)
1869 old_filename = os.path.join(oldf.location.path, oldf.filename)
1870 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1871 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1873 new_filename = os.path.join(utils.poolify(u.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1875 # TODO: Care about size/md5sum collisions etc
1876 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1879 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1880 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1882 # TODO: Check that there's only 1 here
1883 source = get_sources_from_name(u.pkg.changes["source"], u.pkg.changes["version"])[0]
1884 dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
1885 dscf.poolfile_id = newf.file_id
1889 # Install the files into the pool
1890 for newfile, entry in u.pkg.files.items():
1891 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1892 utils.move(newfile, destination)
1893 Logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1894 summarystats.accept_bytes += float(entry["size"])
1896 # Copy the .changes file across for suites which need it.
1898 for suite_name in u.pkg.changes["distribution"].keys():
1899 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1900 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1902 for dest in copy_changes.keys():
1903 utils.copy(u.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1905 # We're done - commit the database changes
1907 # Our SQL session will automatically start a new transaction after
1910 # Move the .changes into the 'done' directory
1911 utils.move(u.pkg.changes_file,
1912 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(u.pkg.changes_file)))
1914 if u.pkg.changes["architecture"].has_key("source") and log_urgency:
1915 UrgencyLog().log(u.pkg.dsc["source"], u.pkg.dsc["version"], u.pkg.changes["urgency"])
1917 # Send accept mail, announce to lists, close bugs and check for
1918 # override disparities
1919 if not cnf["Dinstall::Options::No-Mail"]:
1921 self.Subst["__SUITE__"] = ""
1922 self.Subst["__SUMMARY__"] = summary
1923 mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
1924 utils.send_mail(mail_message)
1925 self.announce(short_summary, 1)
1927 ## Helper stuff for DebBugs Version Tracking
1928 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1929 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1930 # the conditionalization on dsc["bts changelog"] should be
1933 # Write out the version history from the changelog
1934 if self.pkg.changes["architecture"].has_key("source") and \
1935 self.pkg.dsc.has_key("bts changelog"):
1937 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1938 version_history = os.fdopen(fd, 'w')
1939 version_history.write(self.pkg.dsc["bts changelog"])
1940 version_history.close()
1941 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1942 self.pkg.changes_file[:-8]+".versions")
1943 os.rename(temp_filename, filename)
1944 os.chmod(filename, 0644)
1946 # Write out the binary -> source mapping.
1947 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1948 debinfo = os.fdopen(fd, 'w')
1949 for name, entry in sorted(self.pkg.files.items()):
1950 if entry["type"] == "deb":
1951 line = " ".join([entry["package"], entry["version"],
1952 entry["architecture"], entry["source package"],
1953 entry["source version"]])
1954 debinfo.write(line+"\n")
1956 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1957 self.pkg.changes_file[:-8]+".debinfo")
1958 os.rename(temp_filename, filename)
1959 os.chmod(filename, 0644)
1962 # res = get_or_set_queue('buildd', session).autobuild_upload(self.pkg, session)
1965 # now_date = datetime.now()
1970 summarystats.accept_count += 1
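# Illustrative sketch of the Suite::<suite>::CopyChanges setting used above
# (the path is made up): a value such as
#
#     Suite::proposed-updates::CopyChanges "dists/proposed-updates/";
#
# causes the .changes file to also be copied to that path under Dir::Root,
# in addition to the normal move into the 'done' directory.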
1972 def check_override(self):
1974 Checks override entries for validity. Mails "Override disparity" warnings,
1975 if that feature is enabled.
1977 Abandons the check if
1978 - override disparity checks are disabled
1979 - mail sending is disabled
1984 # Abandon the check if:
1985 # a) override disparity checks have been disabled
1986 # b) we're not sending mail
1987 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1988 cnf["Dinstall::Options::No-Mail"]:
1991 summary = self.pkg.check_override()
1996 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1999 self.Subst["__SUMMARY__"] = summary
2000 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2001 utils.send_mail(mail_message)
2002 del self.Subst["__SUMMARY__"]
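# Usage sketch (the Upload instance name 'u' is illustrative); the method does
# its own gating on Dinstall::OverrideDisparityCheck and No-Mail:
#
#     u.check_override()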
2004 ###########################################################################
2006 def remove(self, from_dir=None):
2008 Used (for instance) in p-u to remove the package from unchecked.
2010 Also removes the package from the holding area.
2012 if from_dir is None:
2013 from_dir = self.pkg.directory
2016 for f in self.pkg.files.keys():
2017 os.unlink(os.path.join(from_dir, f))
2018 if os.path.exists(os.path.join(h.holding_dir, f)):
2019 os.unlink(os.path.join(h.holding_dir, f))
2021 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2022 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2023 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2025 ###########################################################################
2027 def move_to_dir (self, dest, perms=0660, changesperms=0664):
2029 Move files to dest with certain perms/changesperms
2032 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2033 dest, perms=changesperms)
2034 for f in self.pkg.files.keys():
2035 utils.move(os.path.join(h.holding_dir, f), dest, perms=perms)
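# Usage sketch (the queue directory chosen here is illustrative; any of the
# Dir::Queue entries referenced elsewhere in this module would do):
#
#     u.move_to_dir(cnf["Dir::Queue::New"], perms=0660, changesperms=0664)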
2037 ###########################################################################
2039 def force_reject(self, reject_files):
2041 Forcefully move files from the current directory to the
2042 reject directory. If any file already exists in the reject
2043 directory, it will be moved to the morgue to make way for
2047 @param reject_files: file dictionary
2053 for file_entry in reject_files:
2054 # Skip any files which don't exist or which we don't have permission to copy.
2055 if os.access(file_entry, os.R_OK) == 0:
2058 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2061 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2063 # File exists? Let's try and move it to the morgue
2064 if e.errno == errno.EEXIST:
2065 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2067 morgue_file = utils.find_next_free(morgue_file)
2068 except NoFreeFilenameError:
2069 # Something's either gone badly Pete Tong, or
2070 # someone is trying to exploit us.
2071 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2073 utils.move(dest_file, morgue_file, perms=0660)
2075 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2078 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2082 # If we got here, we own the destination file, so we can
2083 # safely overwrite it.
2084 utils.move(file_entry, dest_file, 1, perms=0660)
2087 ###########################################################################
2088 def do_reject (self, manual=0, reject_message="", note=""):
2090 Reject an upload. If called without a reject message or C{manual} is
2091 true, spawn an editor so the user can write one.
2094 @param manual: manual or automated rejection
2096 @type reject_message: string
2097 @param reject_message: A reject message
2102 # If we weren't given a manual rejection message, spawn an
2103 # editor so the user can add one in...
2104 if manual and not reject_message:
2105 (fd, temp_filename) = utils.temp_filename()
2106 temp_file = os.fdopen(fd, 'w')
2109 temp_file.write(line)
2111 editor = os.environ.get("EDITOR","vi")
2113 while answer == 'E':
2114 os.system("%s %s" % (editor, temp_filename))
2115 temp_fh = utils.open_file(temp_filename)
2116 reject_message = "".join(temp_fh.readlines())
2118 print "Reject message:"
2119 print utils.prefix_multi_line_string(reject_message, " ", include_blank_lines=1)
2120 prompt = "[R]eject, Edit, Abandon, Quit ?"
2122 while prompt.find(answer) == -1:
2123 answer = utils.our_raw_input(prompt)
2124 m = re_default_answer.search(prompt)
2127 answer = answer[:1].upper()
2128 os.unlink(temp_filename)
2134 print "Rejecting.\n"
2138 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2139 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2141 # Move all the files into the reject directory
2142 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2143 self.force_reject(reject_files)
2145 # If we fail here someone is probably trying to exploit the race
2146 # so let's just raise an exception ...
2147 if os.path.exists(reason_filename):
2148 os.unlink(reason_filename)
2149 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2151 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2155 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2156 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2157 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2158 os.write(reason_fd, reject_message)
2159 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2161 # Build up the rejection email
2162 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2163 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2164 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2165 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2166 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2167 # Write the rejection email out as the <foo>.reason file
2168 os.write(reason_fd, reject_mail_message)
2170 del self.Subst["__REJECTOR_ADDRESS__"]
2171 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2172 del self.Subst["__CC__"]
2176 # Send the rejection mail if appropriate
2177 if not cnf["Dinstall::Options::No-Mail"]:
2178 utils.send_mail(reject_mail_message)
2181 self.logger.log(["rejected", self.pkg.changes_file])
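# Usage sketch (the Upload instance name 'u' and the values are illustrative):
# an automated rejection passes a prepared message, while a manual one leaves
# it empty so an editor is spawned:
#
#     u.do_reject(manual=0, reject_message="\n".join(u.rejects))
#     u.do_reject(manual=1, reject_message="", note=note)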
2185 ################################################################################
2186 def in_override_p(self, package, component, suite, binary_type, filename, session):
2188 Check if a package already has override entries in the DB
2190 @type package: string
2191 @param package: package name
2193 @type component: string
2194 @param component: component name
2197 @param suite: suite name
2199 @type binary_type: string
2200 @param binary_type: type of the package
2202 @type filename: string
2203 @param filename: filename we check
2205 @return: the database result. But no one cares anyway.
2211 if binary_type == "": # must be source
2214 file_type = binary_type
2216 # Override suite name; used for example with proposed-updates
2217 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2218 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2220 result = get_override(package, suite, component, file_type, session)
2222 # If checking for a source package, fall back on the binary override type
2223 if file_type == "dsc" and len(result) < 1:
2224 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2226 # Remember the section and priority so we can check them later if appropriate
2229 self.pkg.files[filename]["override section"] = result.section.section
2230 self.pkg.files[filename]["override priority"] = result.priority.priority
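# Call sketch, mirroring how accepted_checks() below uses this for each file:
#
#     self.in_override_p(entry["package"], entry["component"], suite,
#                        entry.get("dbtype", ""), checkfile, session)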
2235 ################################################################################
2236 def get_anyversion(self, sv_list, suite):
2239 @param sv_list: list of (suite, version) tuples to check
2242 @param suite: suite name
2248 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2249 for (s, v) in sv_list:
2250 if s in [ x.lower() for x in anysuite ]:
2251 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
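# Behaviour sketch with hypothetical input: if "testing" has a
# VersionChecks::Enhances list containing "unstable", both entries below are
# considered and the highest version wins:
#
#     sv_list = [("testing", "1.0-1"), ("unstable", "1.1-1")]
#     # get_anyversion(sv_list, "testing") -> "1.1-1"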
2256 ################################################################################
2258 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2261 @param sv_list: list of (suite, version) tuples to check
2263 @type filename: string
2264 @param filename: name of the file being checked (used in reject messages)
2266 @type new_version: string
2267 @param new_version: version of the package being uploaded
2269 Ensure versions are newer than existing packages in target
2270 suites and that cross-suite version checking rules as
2271 set out in the conf file are satisfied.
2276 # Check versions for each target suite
2277 for target_suite in self.pkg.changes["distribution"].keys():
2278 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2279 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2281 # Enforce "must be newer than target suite" even if conffile omits it
2282 if target_suite not in must_be_newer_than:
2283 must_be_newer_than.append(target_suite)
2285 for (suite, existent_version) in sv_list:
2286 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2288 if suite in must_be_newer_than and sourceful and vercmp < 1:
2289 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2291 if suite in must_be_older_than and vercmp > -1:
2294 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2295 # we really use the other suite, ignoring the conflicting one ...
2296 addsuite = self.pkg.changes["distribution-version"][suite]
2298 add_version = self.get_anyversion(sv_list, addsuite)
2299 target_version = self.get_anyversion(sv_list, target_suite)
2302 # not add_version can only happen if we map to a suite
2303 # that doesn't enhance the suite we're propup'ing from.
2304 # so "propup-ver x a b c; map a d" is a problem only if
2305 # d doesn't enhance a.
2307 # i think we could always propagate in this case, rather
2308 # than complaining. either way, this isn't a REJECT issue
2310 # And - we really should complain to the dorks who configured dak
2311 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2312 self.pkg.changes.setdefault("propdistribution", {})
2313 self.pkg.changes["propdistribution"][addsuite] = 1
2315 elif not target_version:
2316 # not targets_version is true when the package is NEW
2317 # we could just stick with the "...old version..." REJECT
2318 # for this, I think.
2319 self.rejects.append("Won't propagate NEW packages.")
2320 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2321 # propagation would be redundant. no need to reject though.
2322 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2324 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2325 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2327 self.warnings.append("Propagating upload to %s" % (addsuite))
2328 self.pkg.changes.setdefault("propdistribution", {})
2329 self.pkg.changes["propdistribution"][addsuite] = 1
2333 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
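# Illustrative sketch of the per-suite version check configuration read above
# (suite names are illustrative, in the apt-style syntax dak.conf uses):
#
#     Suite::unstable::VersionChecks {
#       MustBeNewerThan { "stable"; "testing"; };
#       MustBeOlderThan { "experimental"; };
#     };
#
# A sourceful upload to unstable must then be strictly newer than what
# stable/testing carry and strictly older than what experimental carries,
# unless a distribution-version mapping allows propagation instead.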
2335 ################################################################################
2336 def check_binary_against_db(self, filename, session):
2337 # Ensure version is sane
2338 q = session.query(BinAssociation)
2339 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2340 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2342 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2343 filename, self.pkg.files[filename]["version"], sourceful=False)
2345 # Check for any existing copies of the file
2346 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2347 q = q.filter_by(version=self.pkg.files[filename]["version"])
2348 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2351 self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2353 ################################################################################
2355 def check_source_against_db(self, filename, session):
2358 source = self.pkg.dsc.get("source")
2359 version = self.pkg.dsc.get("version")
2361 # Ensure version is sane
2362 q = session.query(SrcAssociation)
2363 q = q.join(DBSource).filter(DBSource.source==source)
2365 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2366 filename, version, sourceful=True)
2368 ################################################################################
2369 def check_dsc_against_db(self, filename, session):
2372 @warning: NB: this function can remove entries from the 'files' index [if
2373 the orig tarball is a duplicate of the one in the archive]; if
2374 you're iterating over 'files' and call this function as part of
2375 the loop, be sure to add a check to the top of the loop to
2376 ensure you haven't just tried to dereference the deleted entry.
2381 self.pkg.orig_files = {} # XXX: do we need to clear it?
2382 orig_files = self.pkg.orig_files
2384 # Try and find all files mentioned in the .dsc. This has
2385 # to work harder to cope with the multiple possible
2386 # locations of an .orig.tar.gz.
2387 # The ordering on the select is needed to pick the newest orig
2388 # when it exists in multiple places.
2389 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2391 if self.pkg.files.has_key(dsc_name):
2392 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2393 actual_size = int(self.pkg.files[dsc_name]["size"])
2394 found = "%s in incoming" % (dsc_name)
2396 # Check the file does not already exist in the archive
2397 ql = get_poolfile_like_name(dsc_name, session)
2399 # Strip out anything that isn't '%s' or '/%s$'
2401 if not i.filename.endswith(dsc_name):
2404 # "[dak] has not broken them. [dak] has fixed a
2405 # brokenness. Your crappy hack exploited a bug in
2408 # "(Come on! I thought it was always obvious that
2409 # one just doesn't release different files with
2410 # the same name and version.)"
2411 # -- ajk@ on d-devel@l.d.o
2414 # Ignore exact matches for .orig.tar.gz
2416 if re_is_orig_source.match(dsc_name):
2418 if self.pkg.files.has_key(dsc_name) and \
2419 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2420 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2421 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2422 # TODO: Don't delete the entry, just mark it as not needed
2423 # This would fix the stupidity of changing something we often iterate over
2424 # whilst we're doing it
2425 del self.pkg.files[dsc_name]
2426 if not orig_files.has_key(dsc_name):
2427 orig_files[dsc_name] = {}
2428 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2432 self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2434 elif re_is_orig_source.match(dsc_name):
2436 ql = get_poolfile_like_name(dsc_name, session)
2438 # Strip out anything that isn't '%s' or '/%s$'
2439 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2441 if not i.filename.endswith(dsc_name):
2445 # Unfortunately, we may get more than one match here if,
2446 # for example, the package was in potato but had an -sa
2447 # upload in woody. So we need to choose the right one.
2449 # default to something sane in case we don't match any or have only one
2454 old_file = os.path.join(i.location.path, i.filename)
2455 old_file_fh = utils.open_file(old_file)
2456 actual_md5 = apt_pkg.md5sum(old_file_fh)
2458 actual_size = os.stat(old_file)[stat.ST_SIZE]
2459 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2462 old_file = os.path.join(i.location.path, i.filename)
2463 old_file_fh = utils.open_file(old_file)
2464 actual_md5 = apt_pkg.md5sum(old_file_fh)
2466 actual_size = os.stat(old_file)[stat.ST_SIZE]
2468 suite_type = x.location.archive_type
2469 # need this for updating dsc_files in install()
2470 dsc_entry["files id"] = x.file_id
2471 # See install() in process-accepted...
2472 if not orig_files.has_key(dsc_name):
2473 orig_files[dsc_name] = {}
2474 orig_files[dsc_name]["id"] = x.file_id
2475 orig_files[dsc_name]["path"] = old_file
2476 orig_files[dsc_name]["location"] = x.location.location_id
2478 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2479 # Not there? Check the queue directories...
2480 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2481 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2483 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2484 if os.path.exists(in_otherdir):
2485 in_otherdir_fh = utils.open_file(in_otherdir)
2486 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2487 in_otherdir_fh.close()
2488 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2490 if not orig_files.has_key(dsc_name):
2491 orig_files[dsc_name] = {}
2492 orig_files[dsc_name]["path"] = in_otherdir
2495 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2498 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2500 if actual_md5 != dsc_entry["md5sum"]:
2501 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2502 if actual_size != int(dsc_entry["size"]):
2503 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2505 ################################################################################
2506 # This is used by process-new and process-holding to recheck a changes file
2507 # at the time we're running. It mainly wraps various other internal functions
2508 # and is similar to accepted_checks - these should probably be tidied up
2510 def recheck(self, session):
2512 for f in self.pkg.files.keys():
2513 # The .orig.tar.gz can disappear out from under us if it's a
2514 # duplicate of one in the archive.
2515 if not self.pkg.files.has_key(f):
2518 entry = self.pkg.files[f]
2520 # Check that the source still exists
2521 if entry["type"] == "deb":
2522 source_version = entry["source version"]
2523 source_package = entry["source package"]
2524 if not self.pkg.changes["architecture"].has_key("source") \
2525 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2526 source_epochless_version = re_no_epoch.sub('', source_version)
2527 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2529 for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
2530 if cnf.has_key("Dir::Queue::%s" % (q)):
2531 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2534 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2536 # Version and file overwrite checks
2537 if entry["type"] == "deb":
2538 self.check_binary_against_db(f, session)
2539 elif entry["type"] == "dsc":
2540 self.check_source_against_db(f, session)
2541 self.check_dsc_against_db(f, session)
2543 ################################################################################
2544 def accepted_checks(self, overwrite_checks, session):
2545 # Recheck anything that relies on the database, since that's not
2546 # frozen between accept and our run time when called from p-a.
2548 # overwrite_checks is set to False when installing to stable/oldstable
2553 # Find the .dsc (again)
2555 for f in self.pkg.files.keys():
2556 if self.pkg.files[f]["type"] == "dsc":
2559 for checkfile in self.pkg.files.keys():
2560 # The .orig.tar.gz can disappear out from under us if it's a
2561 # duplicate of one in the archive.
2562 if not self.pkg.files.has_key(checkfile):
2565 entry = self.pkg.files[checkfile]
2567 # Check that the source still exists
2568 if entry["type"] == "deb":
2569 source_version = entry["source version"]
2570 source_package = entry["source package"]
2571 if not self.pkg.changes["architecture"].has_key("source") \
2572 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2573 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2575 # Version and file overwrite checks
2576 if overwrite_checks:
2577 if entry["type"] == "deb":
2578 self.check_binary_against_db(checkfile, session)
2579 elif entry["type"] == "dsc":
2580 self.check_source_against_db(checkfile, session)
2581 self.check_dsc_against_db(dsc_filename, session)
2583 # propagate in case it is in the override tables:
2584 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2585 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2586 propogate[suite] = 1
2588 nopropogate[suite] = 1
2590 for suite in propogate.keys():
2591 if suite in nopropogate:
2593 self.pkg.changes["distribution"][suite] = 1
2595 for checkfile in self.pkg.files.keys():
entry = self.pkg.files[checkfile]   # re-fetch; the 'entry' left over from the loop above would be stale here
2596 # Check the package is still in the override tables
2597 for suite in self.pkg.changes["distribution"].keys():
2598 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2599 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2601 ################################################################################
2602 # This is not really a reject, but an unaccept, but since a) the code for
2603 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2604 # extremely rare, for now we'll go with whining at our admin folks...
2606 def do_unaccept(self):
2610 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2611 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2612 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2613 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2614 if cnf.has_key("Dinstall::Bcc"):
2615 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2617 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2619 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2621 # Write the rejection email out as the <foo>.reason file
2622 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2623 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2625 # If we fail here someone is probably trying to exploit the race
2626 # so let's just raise an exception ...
2627 if os.path.exists(reject_filename):
2628 os.unlink(reject_filename)
2630 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2631 os.write(fd, reject_mail_message)
2634 utils.send_mail(reject_mail_message)
2636 del self.Subst["__REJECTOR_ADDRESS__"]
2637 del self.Subst["__REJECT_MESSAGE__"]
2638 del self.Subst["__CC__"]
2640 ################################################################################
2641 # If any file of an upload has a recent mtime then chances are good
2642 # the file is still being uploaded.
2644 def upload_too_new(self):
2647 # Move back to the original directory to get accurate time stamps
2649 os.chdir(self.pkg.directory)
2650 file_list = self.pkg.files.keys()
2651 file_list.extend(self.pkg.dsc_files.keys())
2652 file_list.append(self.pkg.changes_file)
2655 last_modified = time.time()-os.path.getmtime(f)
2656 if last_modified < int(cnf["Dinstall::SkipTime"]):
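# Illustrative sketch: with a setting such as
#
#     Dinstall::SkipTime "300";
#
# any file modified less than 300 seconds ago marks the upload as still in
# progress (the value shown is made up).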