5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
81 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
83 # Validate the override type
84 type_id = get_override_type(file_type, session)
86 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
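# Editor's sketch (illustrative, not part of dak): the dispatch rule of
# get_type() in miniature -- an explicit "dbtype" (set during the binary
# checks) wins, otherwise a source-looking "type" maps to "dsc", and
# anything else is fatal.  The regex is a stand-in for daklib's re_source_ext.
def _example_get_type(f):
    import re
    source_ext = re.compile(r"(orig(-.+)?\.tar\.(gz|bz2)|diff\.gz|tar\.(gz|bz2)|dsc)$")
    if "dbtype" in f:
        return f["dbtype"]
    if source_ext.match(f["type"]):
        return "dsc"
    raise ValueError("invalid type (%s)" % f.get("type"))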
90 ################################################################################
92 # Determine what parts in a .changes are NEW
94 def determine_new(changes, files, warn=1):
96 Determine what parts in a C{changes} file are NEW.
98 @type changes: Upload.Pkg.changes dict
99 @param changes: Changes dictionary
101 @type files: Upload.Pkg.files dict
102 @param files: Files dictionary
105 @param warn: Warn if overrides are added for (old)stable
108 @return: dictionary of NEW components.
113 session = DBConn().session()
115 # Build up a list of potentially new things
116 for name, f in files.items():
117 # Skip byhand elements
118 # if f["type"] == "byhand":
121 priority = f["priority"]
122 section = f["section"]
123 file_type = get_type(f, session)
124 component = f["component"]
126 if file_type == "dsc":
129 if not new.has_key(pkg):
131 new[pkg]["priority"] = priority
132 new[pkg]["section"] = section
133 new[pkg]["type"] = file_type
134 new[pkg]["component"] = component
135 new[pkg]["files"] = []
137 old_type = new[pkg]["type"]
138 if old_type != file_type:
139 # source gets trumped by deb or udeb
140 if old_type == "dsc":
141 new[pkg]["priority"] = priority
142 new[pkg]["section"] = section
143 new[pkg]["type"] = file_type
144 new[pkg]["component"] = component
146 new[pkg]["files"].append(name)
148 if f.has_key("othercomponents"):
149 new[pkg]["othercomponents"] = f["othercomponents"]
151 # Fix up the list of target suites
153 for suite in changes["suite"].keys():
154 override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
156 (olderr, newerr) = (get_suite(suite, session) == None,
157 get_suite(override, session) == None)
159 (oinv, ninv) = ("", "")
160 if olderr: oinv = "invalid "
161 if newerr: ninv = "invalid "
162 print "warning: overriding %ssuite %s to %ssuite %s" % (
163 oinv, suite, ninv, override)
164 del changes["suite"][suite]
165 changes["suite"][override] = 1
167 for suite in changes["suite"].keys():
168 for pkg in new.keys():
169 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
171 for file_entry in new[pkg]["files"]:
172 if files[file_entry].has_key("new"):
173 del files[file_entry]["new"]
177 for s in ['stable', 'oldstable']:
178 if changes["suite"].has_key(s):
179 print "WARNING: overrides will be added for %s!" % s
180 for pkg in new.keys():
181 if new[pkg].has_key("othercomponents"):
182 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
188 ################################################################################
190 def check_valid(new):
192 Check if section and priority for NEW packages exist in database.
193 Additionally does sanity checks:
194 - debian-installer packages have to be udeb (or source)
195 - non debian-installer packages can not be udeb
196 - source priority can only be assigned to dsc file types
199 @param new: Dict of new packages with their section, priority and type.
202 for pkg in new.keys():
203 section_name = new[pkg]["section"]
204 priority_name = new[pkg]["priority"]
205 file_type = new[pkg]["type"]
207 section = get_section(section_name)
209 new[pkg]["section id"] = -1
211 new[pkg]["section id"] = section.section_id
213 priority = get_priority(priority_name)
215 new[pkg]["priority id"] = -1
217 new[pkg]["priority id"] = priority.priority_id
220 di = section_name.find("debian-installer") != -1
222 # If d-i, we must be udeb and vice-versa
223 if (di and file_type not in ("udeb", "dsc")) or \
224 (not di and file_type == "udeb"):
225 new[pkg]["section id"] = -1
227 # If dsc we need to be source and vice-versa
228 if (priority_name == "source" and file_type != "dsc") or \
229 (priority_name != "source" and file_type == "dsc"):
230 new[pkg]["priority id"] = -1
232 ###############################################################################
234 # Used by Upload.check_timestamps
235 class TarTime(object):
236 def __init__(self, future_cutoff, past_cutoff):
238 self.future_cutoff = future_cutoff
239 self.past_cutoff = past_cutoff
242 self.future_files = {}
243 self.ancient_files = {}
245 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
246 if MTime > self.future_cutoff:
247 self.future_files[Name] = MTime
248 if MTime < self.past_cutoff:
249 self.ancient_files[Name] = MTime
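# Editor's sketch (illustrative, not part of dak): how check_timestamps()
# below drives TarTime.  The callback signature matches the old apt_inst
# tar-extraction callback; only Name and MTime are actually used.
def _example_tartime():
    import time
    now = time.time()
    tt = TarTime(now + 24 * 3600, 0)  # future cutoff: tomorrow; past: epoch
    tt.callback("FILE", "usr/bin/frob", "", 0, 0, 0, 0, now + 48 * 3600, 0, 0)
    return tt.future_files  # {'usr/bin/frob': <mtime two days from now>}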
251 ###############################################################################
253 class Upload(object):
255 Everything that has to do with processing an upload.
263 ###########################################################################
266 """ Reset a number of internal variables."""
268 # Initialize the substitution template map
271 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
272 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
273 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
274 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
280 self.later_check_files = []
284 def package_info(self):
286 Format various messages from this Upload to send to the maintainer.
290 ('Reject Reasons', self.rejects),
291 ('Warnings', self.warnings),
292 ('Notes', self.notes),
296 for title, messages in msgs:
298 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
303 ###########################################################################
304 def update_subst(self):
305 """ Set up the per-package template substitution mappings """
309 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
310 if not self.pkg.changes.has_key("architecture") or not \
311 isinstance(self.pkg.changes["architecture"], dict):
312 self.pkg.changes["architecture"] = { "Unknown" : "" }
314 # and maintainer2047 may not exist.
315 if not self.pkg.changes.has_key("maintainer2047"):
316 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
318 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
319 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
320 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
322 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
323 if self.pkg.changes["architecture"].has_key("source") and \
324 self.pkg.changes["changedby822"] != "" and \
325 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
327 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
328 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
329 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
331 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
332 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
333 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
335 if "sponsoremail" in self.pkg.changes:
336 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
338 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
339 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
341 # Apply any global override of the Maintainer field
342 if cnf.get("Dinstall::OverrideMaintainer"):
343 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
344 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
346 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
347 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
348 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
350 ###########################################################################
351 def load_changes(self, filename):
353 Load a changes file and set up a dictionary around it. Also checks for mandatory fields.
356 @type filename: string
357 @param filename: Changes filename, full path.
360 @return: whether the changes file was valid or not. We may want to
361 reject even if this is True (see what gets put in self.rejects).
362 This is simply to prevent us even trying things later which will
363 fail because we couldn't properly parse the file.
366 self.pkg.changes_file = filename
368 # Parse the .changes field into a dictionary
370 self.pkg.changes.update(parse_changes(filename))
371 except CantOpenError:
372 self.rejects.append("%s: can't read file." % (filename))
374 except ParseChangesError, line:
375 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
377 except ChangesUnicodeError:
378 self.rejects.append("%s: changes file not proper utf-8" % (filename))
381 # Parse the Files field from the .changes into another dictionary
383 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
384 except ParseChangesError, line:
385 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
387 except UnknownFormatError, format:
388 self.rejects.append("%s: unknown format '%s'." % (filename, format))
391 # Check for mandatory fields
392 for i in ("distribution", "source", "binary", "architecture",
393 "version", "maintainer", "files", "changes", "description"):
394 if not self.pkg.changes.has_key(i):
395 # Avoid undefined errors later
396 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
399 # Strip a source version in brackets from the source field
400 if re_strip_srcver.search(self.pkg.changes["source"]):
401 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
403 # Ensure the source field is a valid package name.
404 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
405 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
407 # Split multi-value fields into a lower-level dictionary
408 for i in ("architecture", "distribution", "binary", "closes"):
409 o = self.pkg.changes.get(i, "")
411 del self.pkg.changes[i]
413 self.pkg.changes[i] = {}
416 self.pkg.changes[i][j] = 1
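# Editor's note (illustrative): after this loop a field such as
# Architecture: "amd64 i386 source" has become
# self.pkg.changes["architecture"] == {"amd64": 1, "i386": 1, "source": 1},
# which is why later checks use has_key()/dict membership rather than
# substring matching.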
418 # Fix the Maintainer: field to be RFC822/2047 compatible
420 (self.pkg.changes["maintainer822"],
421 self.pkg.changes["maintainer2047"],
422 self.pkg.changes["maintainername"],
423 self.pkg.changes["maintaineremail"]) = \
424 fix_maintainer (self.pkg.changes["maintainer"])
425 except ParseMaintError, msg:
426 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
427 % (filename, self.pkg.changes["maintainer"], msg))
429 # ...likewise for the Changed-By: field if it exists.
431 (self.pkg.changes["changedby822"],
432 self.pkg.changes["changedby2047"],
433 self.pkg.changes["changedbyname"],
434 self.pkg.changes["changedbyemail"]) = \
435 fix_maintainer (self.pkg.changes.get("changed-by", ""))
436 except ParseMaintError, msg:
437 self.pkg.changes["changedby822"] = ""
438 self.pkg.changes["changedby2047"] = ""
439 self.pkg.changes["changedbyname"] = ""
440 self.pkg.changes["changedbyemail"] = ""
442 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
443 % (filename, self.pkg.changes["changed-by"], msg))
445 # Ensure all the values in Closes: are numbers
446 if self.pkg.changes.has_key("closes"):
447 for i in self.pkg.changes["closes"].keys():
448 if re_isanum.match (i) == None:
449 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
451 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
452 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
453 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
455 # Check the .changes is non-empty
456 if not self.pkg.files:
457 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
460 # Changes was syntactically valid even if we'll reject
463 ###########################################################################
465 def check_distributions(self):
466 "Check and map the Distribution field"
470 # Handle suite mappings
471 for m in Cnf.ValueList("SuiteMappings"):
474 if mtype == "map" or mtype == "silent-map":
475 (source, dest) = args[1:3]
476 if self.pkg.changes["distribution"].has_key(source):
477 del self.pkg.changes["distribution"][source]
478 self.pkg.changes["distribution"][dest] = 1
479 if mtype != "silent-map":
480 self.notes.append("Mapping %s to %s." % (source, dest))
481 if self.pkg.changes.has_key("distribution-version"):
482 if self.pkg.changes["distribution-version"].has_key(source):
483 self.pkg.changes["distribution-version"][source]=dest
484 elif mtype == "map-unreleased":
485 (source, dest) = args[1:3]
486 if self.pkg.changes["distribution"].has_key(source):
487 for arch in self.pkg.changes["architecture"].keys():
488 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
489 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
490 del self.pkg.changes["distribution"][source]
491 self.pkg.changes["distribution"][dest] = 1
493 elif mtype == "ignore":
495 if self.pkg.changes["distribution"].has_key(suite):
496 del self.pkg.changes["distribution"][suite]
497 self.warnings.append("Ignoring %s as a target suite." % (suite))
498 elif mtype == "reject":
500 if self.pkg.changes["distribution"].has_key(suite):
501 self.rejects.append("Uploads to %s are not accepted." % (suite))
502 elif mtype == "propup-version":
503 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
505 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
506 if self.pkg.changes["distribution"].has_key(args[1]):
507 self.pkg.changes.setdefault("distribution-version", {})
508 for suite in args[2:]:
509 self.pkg.changes["distribution-version"][suite] = suite
511 # Ensure there is (still) a target distribution
512 if len(self.pkg.changes["distribution"].keys()) < 1:
513 self.rejects.append("No valid distribution remaining.")
515 # Ensure target distributions exist
516 for suite in self.pkg.changes["distribution"].keys():
517 if not Cnf.has_key("Suite::%s" % (suite)):
518 self.rejects.append("Unknown distribution `%s'." % (suite))
520 ###########################################################################
522 def binary_file_checks(self, f, session):
524 entry = self.pkg.files[f]
526 # Extract package control information
527 deb_file = utils.open_file(f)
529 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
531 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
533 # Can't continue, none of the checks on control would work.
536 # Check for mandatory "Description:"
539 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
541 self.rejects.append("%s: Missing Description in binary package" % (f))
546 # Check for mandatory fields
547 for field in [ "Package", "Architecture", "Version" ]:
548 if control.Find(field) == None:
550 self.rejects.append("%s: No %s field in control." % (f, field))
553 # Ensure the package name matches the one given in the .changes
554 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
555 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
557 # Validate the package field
558 package = control.Find("Package")
559 if not re_valid_pkg_name.match(package):
560 self.rejects.append("%s: invalid package name '%s'." % (f, package))
562 # Validate the version field
563 version = control.Find("Version")
564 if not re_valid_version.match(version):
565 self.rejects.append("%s: invalid version number '%s'." % (f, version))
567 # Ensure the architecture of the .deb is one we know about.
568 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
569 architecture = control.Find("Architecture")
570 upload_suite = self.pkg.changes["distribution"].keys()[0]
572 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
573 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
574 self.rejects.append("Unknown architecture '%s'." % (architecture))
576 # Ensure the architecture of the .deb is one of the ones
577 # listed in the .changes.
578 if not self.pkg.changes["architecture"].has_key(architecture):
579 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
581 # Sanity-check the Depends field
582 depends = control.Find("Depends")
584 self.rejects.append("%s: Depends field is empty." % (f))
586 # Sanity-check the Provides field
587 provides = control.Find("Provides")
589 provide = re_spacestrip.sub('', provides)
591 self.rejects.append("%s: Provides field is empty." % (f))
592 prov_list = provide.split(",")
593 for prov in prov_list:
594 if not re_valid_pkg_name.match(prov):
595 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
597 # Check the section & priority match those given in the .changes (non-fatal)
598 if control.Find("Section") and entry["section"] != "" \
599 and entry["section"] != control.Find("Section"):
600 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
601 (f, control.Find("Section", ""), entry["section"]))
602 if control.Find("Priority") and entry["priority"] != "" \
603 and entry["priority"] != control.Find("Priority"):
604 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
605 (f, control.Find("Priority", ""), entry["priority"]))
607 entry["package"] = package
608 entry["architecture"] = architecture
609 entry["version"] = version
610 entry["maintainer"] = control.Find("Maintainer", "")
612 if f.endswith(".udeb"):
613 self.pkg.files[f]["dbtype"] = "udeb"
614 elif f.endswith(".deb"):
615 self.pkg.files[f]["dbtype"] = "deb"
617 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
619 entry["source"] = control.Find("Source", entry["package"])
621 # Get the source version
622 source = entry["source"]
625 if source.find("(") != -1:
626 m = re_extract_src_version.match(source)
628 source_version = m.group(2)
630 if not source_version:
631 source_version = self.pkg.files[f]["version"]
633 entry["source package"] = source
634 entry["source version"] = source_version
636 # Ensure the filename matches the contents of the .deb
637 m = re_isadeb.match(f)
640 file_package = m.group(1)
641 if entry["package"] != file_package:
642 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
643 (f, file_package, entry["dbtype"], entry["package"]))
644 epochless_version = re_no_epoch.sub('', control.Find("Version"))
647 file_version = m.group(2)
648 if epochless_version != file_version:
649 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
650 (f, file_version, entry["dbtype"], epochless_version))
653 file_architecture = m.group(3)
654 if entry["architecture"] != file_architecture:
655 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
656 (f, file_architecture, entry["dbtype"], entry["architecture"]))
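# Editor's note (illustrative): re_isadeb splits a filename such as
# "foo_1.2-3_amd64.deb" into package "foo", version "1.2-3" and
# architecture "amd64"; the three checks above compare those parts with
# the control file, using the epochless version since epochs cannot
# appear in filenames.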
658 # Check for existent source
659 source_version = entry["source version"]
660 source_package = entry["source package"]
661 if self.pkg.changes["architecture"].has_key("source"):
662 if source_version != self.pkg.changes["version"]:
663 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
664 (source_version, f, self.pkg.changes["version"]))
666 # Check in the SQL database
667 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
668 # Check in one of the other directories
669 source_epochless_version = re_no_epoch.sub('', source_version)
670 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
671 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
673 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
676 dsc_file_exists = False
677 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
678 if cnf.has_key("Dir::Queue::%s" % (myq)):
679 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
680 dsc_file_exists = True
683 if not dsc_file_exists:
684 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
686 # Check the version and for file overwrites
687 self.check_binary_against_db(f, session)
689 # Temporarily disable contents generation until we change the table storage layout
692 #if len(b.rejects) > 0:
693 # for j in b.rejects:
694 # self.rejects.append(j)
696 def source_file_checks(self, f, session):
697 entry = self.pkg.files[f]
699 m = re_issource.match(f)
703 entry["package"] = m.group(1)
704 entry["version"] = m.group(2)
705 entry["type"] = m.group(3)
707 # Ensure the source package name matches the Source field in the .changes
708 if self.pkg.changes["source"] != entry["package"]:
709 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
711 # Ensure the source version matches the version in the .changes file
712 if re_is_orig_source.match(f):
713 changes_version = self.pkg.changes["chopversion2"]
715 changes_version = self.pkg.changes["chopversion"]
717 if changes_version != entry["version"]:
718 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
720 # Ensure the .changes lists source in the Architecture field
721 if not self.pkg.changes["architecture"].has_key("source"):
722 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
724 # Check the signature of a .dsc file
725 if entry["type"] == "dsc":
726 # check_signature returns either:
727 # (None, [list, of, rejects]) or (signature, [])
728 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
730 self.rejects.append(j)
732 entry["architecture"] = "source"
734 def per_suite_file_checks(self, f, suite, session):
736 entry = self.pkg.files[f]
739 if entry.has_key("byhand"):
742 # Check we have fields we need to do these checks
744 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
745 if not entry.has_key(m):
746 self.rejects.append("file '%s' does not have field %s set" % (f, m))
752 # Handle component mappings
753 for m in cnf.ValueList("ComponentMappings"):
754 (source, dest) = m.split()
755 if entry["component"] == source:
756 entry["original component"] = source
757 entry["component"] = dest
759 # Ensure the component is valid for the target suite
760 if cnf.has_key("Suite:%s::Components" % (suite)) and \
761 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
762 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
765 # Validate the component
766 if not get_component(entry["component"], session):
767 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
770 # See if the package is NEW
771 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
774 # Validate the priority
775 if entry["priority"].find('/') != -1:
776 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
778 # Determine the location
779 location = cnf["Dir::Pool"]
780 l = get_location(location, entry["component"], session=session)
782 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %)" % entry["component"])
783 entry["location id"] = -1
785 entry["location id"] = l.location_id
787 # Check the md5sum & size against existing files (if any)
788 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
790 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
791 entry["size"], entry["md5sum"], entry["location id"])
794 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
795 elif found is False and poolfile is not None:
796 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
799 entry["files id"] = None
801 entry["files id"] = poolfile.file_id
803 # Check for packages that have moved from one component to another
804 entry['suite'] = suite
805 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
807 entry["othercomponents"] = res.fetchone()[0]
809 def check_files(self, action=True):
810 file_keys = self.pkg.files.keys()
816 os.chdir(self.pkg.directory)
818 ret = holding.copy_to_holding(f)
820 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
824 # check we already know the changes file
825 # [NB: this check must be done post-suite mapping]
826 base_filename = os.path.basename(self.pkg.changes_file)
828 session = DBConn().session()
831 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
832 # if in the pool or in a queue other than unchecked, reject
833 if (dbc.in_queue is None) \
834 or (dbc.in_queue is not None
835 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
836 self.rejects.append("%s file already known to dak" % base_filename)
837 except NoResultFound, e:
844 for f, entry in self.pkg.files.items():
845 # Ensure the file does not already exist in one of the accepted directories
846 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
847 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
848 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
849 self.rejects.append("%s file already exists in the %s directory." % (f, d))
851 if not re_taint_free.match(f):
852 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
854 # Check the file is readable
855 if os.access(f, os.R_OK) == 0:
856 # When running in -n, copy_to_holding() won't have
857 # generated the reject_message, so we need to.
859 if os.path.exists(f):
860 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
862 # Don't directly reject, mark to check later to deal with orig's
863 # we can find in the pool
864 self.later_check_files.append(f)
865 entry["type"] = "unreadable"
868 # If it's byhand skip remaining checks
869 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
871 entry["type"] = "byhand"
873 # Checks for a binary package...
874 elif re_isadeb.match(f):
876 entry["type"] = "deb"
878 # This routine appends to self.rejects/warnings as appropriate
879 self.binary_file_checks(f, session)
881 # Checks for a source package...
882 elif re_issource.match(f):
885 # This routine appends to self.rejects/warnings as appropriate
886 self.source_file_checks(f, session)
888 # Not a binary or source package? Assume byhand...
891 entry["type"] = "byhand"
893 # Per-suite file checks
894 entry["oldfiles"] = {}
895 for suite in self.pkg.changes["distribution"].keys():
896 self.per_suite_file_checks(f, suite, session)
900 # If the .changes file says it has source, it must have source.
901 if self.pkg.changes["architecture"].has_key("source"):
903 self.rejects.append("no source found and Architecture line in changes mention source.")
905 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
906 self.rejects.append("source only uploads are not supported.")
908 ###########################################################################
909 def check_dsc(self, action=True, session=None):
910 """Returns bool indicating whether or not the source changes are valid"""
911 # Ensure there is source to check
912 if not self.pkg.changes["architecture"].has_key("source"):
917 for f, entry in self.pkg.files.items():
918 if entry["type"] == "dsc":
920 self.rejects.append("can not process a .changes file with multiple .dsc's.")
925 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
927 self.rejects.append("source uploads must contain a dsc file")
930 # Parse the .dsc file
932 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
933 except CantOpenError:
934 # if not -n copy_to_holding() will have done this for us...
936 self.rejects.append("%s: can't read file." % (dsc_filename))
937 except ParseChangesError, line:
938 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
939 except InvalidDscError, line:
940 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
941 except ChangesUnicodeError:
942 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
944 # Build up the file list of files mentioned by the .dsc
946 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
947 except NoFilesFieldError:
948 self.rejects.append("%s: no Files: field." % (dsc_filename))
950 except UnknownFormatError, format:
951 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
953 except ParseChangesError, line:
954 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
957 # Enforce mandatory fields
958 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
959 if not self.pkg.dsc.has_key(i):
960 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
963 # Validate the source and version fields
964 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
965 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
966 if not re_valid_version.match(self.pkg.dsc["version"]):
967 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
969 # Only a limited list of source formats are allowed in each suite
970 for dist in self.pkg.changes["distribution"].keys():
971 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
972 if self.pkg.dsc["format"] not in allowed:
973 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
975 # Validate the Maintainer field
977 # We ignore the return value
978 fix_maintainer(self.pkg.dsc["maintainer"])
979 except ParseMaintError, msg:
980 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
981 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
983 # Validate the build-depends field(s)
984 for field_name in [ "build-depends", "build-depends-indep" ]:
985 field = self.pkg.dsc.get(field_name)
987 # Have apt try to parse them...
989 apt_pkg.ParseSrcDepends(field)
991 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
993 # Ensure the version number in the .dsc matches the version number in the .changes
994 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
995 changes_version = self.pkg.files[dsc_filename]["version"]
997 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
998 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1000 # Ensure the Files field contains only what's expected
1001 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1003 # Ensure source is newer than existing source in target suites
1004 session = DBConn().session()
1005 self.check_source_against_db(dsc_filename, session)
1006 self.check_dsc_against_db(dsc_filename, session)
1009 # Finally, check if we're missing any files
1010 for f in self.later_check_files:
1011 self.rejects.append("Could not find file %s references in changes" % f)
1015 ###########################################################################
1017 def get_changelog_versions(self, source_dir):
1018 """Extracts a the source package and (optionally) grabs the
1019 version history out of debian/changelog for the BTS."""
1023 # Find the .dsc (again)
1025 for f in self.pkg.files.keys():
1026 if self.pkg.files[f]["type"] == "dsc":
1029 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1030 if not dsc_filename:
1033 # Create a symlink mirror of the source files in our temporary directory
1034 for f in self.pkg.files.keys():
1035 m = re_issource.match(f)
1037 src = os.path.join(source_dir, f)
1038 # If a file is missing for whatever reason, give up.
1039 if not os.path.exists(src):
1042 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1043 self.pkg.orig_files[f].has_key("path"):
1045 dest = os.path.join(os.getcwd(), f)
1046 os.symlink(src, dest)
1048 # If the orig files are not a part of the upload, create symlinks to the
1050 for orig_file in self.pkg.orig_files.keys():
1051 if not self.pkg.orig_files[orig_file].has_key("path"):
1053 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1054 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1056 # Extract the source
1057 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1058 (result, output) = commands.getstatusoutput(cmd)
1060 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1061 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1064 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1067 # Get the upstream version
1068 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1069 if re_strip_revision.search(upstr_version):
1070 upstr_version = re_strip_revision.sub('', upstr_version)
1072 # Ensure the changelog file exists
1073 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1074 if not os.path.exists(changelog_filename):
1075 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1078 # Parse the changelog
1079 self.pkg.dsc["bts changelog"] = ""
1080 changelog_file = utils.open_file(changelog_filename)
1081 for line in changelog_file.readlines():
1082 m = re_changelog_versions.match(line)
1084 self.pkg.dsc["bts changelog"] += line
1085 changelog_file.close()
1087 # Check we found at least one revision in the changelog
1088 if not self.pkg.dsc["bts changelog"]:
1089 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1091 def check_source(self):
1093 # a) there's no source
1094 if not self.pkg.changes["architecture"].has_key("source"):
1097 tmpdir = utils.temp_dirname()
1099 # Move into the temporary directory
1103 # Get the changelog version history
1104 self.get_changelog_versions(cwd)
1106 # Move back and cleanup the temporary tree
1110 shutil.rmtree(tmpdir)
1112 if e.errno != errno.EACCES:
1114 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1116 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1117 # We probably have u-r or u-w directories so chmod everything
1119 cmd = "chmod -R u+rwx %s" % (tmpdir)
1120 result = os.system(cmd)
1122 utils.fubar("'%s' failed with result %s." % (cmd, result))
1123 shutil.rmtree(tmpdir)
1124 except Exception, e:
1125 print "foobar2 (%s)" % e
1126 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1128 ###########################################################################
1129 def ensure_hashes(self):
1130 # Make sure we recognise the format of the Files: field in the .changes
1131 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1132 if len(format) == 2:
1133 format = int(format[0]), int(format[1])
1135 format = int(float(format[0])), 0
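# Editor's note (illustrative): a Format of "1.8" parses to (1, 8) here,
# while a bare "1" falls through to (1, 0); the tuple is compared below
# against the changes version each checksum field first appeared in.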
1137 # We need to deal with the original changes blob, as the fields we need
1138 # might not be in the changes dict serialised into the .dak anymore.
1139 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1141 # Copy the checksums over to the current changes dict. This will keep
1142 # the existing modifications to it intact.
1143 for field in orig_changes:
1144 if field.startswith('checksums-'):
1145 self.pkg.changes[field] = orig_changes[field]
1147 # Check for unsupported hashes
1148 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1149 self.rejects.append(j)
1151 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1152 self.rejects.append(j)
1154 # We have to calculate the hash ourselves if the changes format predates the
1155 # hash field, rather than requiring the field to exist in the changes file
1156 for hashname, hashfunc, version in utils.known_hashes:
1157 # TODO: Move _ensure_changes_hash into this class
1158 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1159 self.rejects.append(j)
1160 if "source" in self.pkg.changes["architecture"]:
1161 # TODO: Move _ensure_dsc_hash into this class
1162 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1163 self.rejects.append(j)
1165 def check_hashes(self):
1166 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1167 self.rejects.append(m)
1169 for m in utils.check_size(".changes", self.pkg.files):
1170 self.rejects.append(m)
1172 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1173 self.rejects.append(m)
1175 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1176 self.rejects.append(m)
1178 self.ensure_hashes()
1180 ###########################################################################
1182 def ensure_orig(self, target_dir='.', session=None):
1184 Ensures that all orig files mentioned in the changes file are present
1185 in target_dir. If they do not exist, they are symlinked into place.
1187 A list containing the symlinks that were created is returned (so they
1194 for filename, entry in self.pkg.dsc_files.iteritems():
1195 if not re_is_orig_source.match(filename):
1196 # File is not an orig; ignore
1199 if os.path.exists(filename):
1200 # File exists, no need to continue
1203 def symlink_if_valid(path):
1204 f = utils.open_file(path)
1205 md5sum = apt_pkg.md5sum(f)
1208 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1209 expected = (int(entry['size']), entry['md5sum'])
1211 if fingerprint != expected:
1214 dest = os.path.join(target_dir, filename)
1216 os.symlink(path, dest)
1217 symlinked.append(dest)
1223 session_ = DBConn().session()
1228 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1229 poolfile_path = os.path.join(
1230 poolfile.location.path, poolfile.filename
1233 if symlink_if_valid(poolfile_path):
1243 # Look in some other queues for the file
1244 queues = ('New', 'Byhand', 'ProposedUpdates',
1245 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1247 for queue in queues:
1248 if not cnf.get('Dir::Queue::%s' % queue):
1251 queuefile_path = os.path.join(
1252 cnf['Dir::Queue::%s' % queue], filename
1255 if not os.path.exists(queuefile_path):
1256 # Does not exist in this queue
1259 if symlink_if_valid(queuefile_path):
1264 ###########################################################################
1266 def check_lintian(self):
1268 Extends self.rejects by checking the output of lintian against tags
1269 specified in Dinstall::LintianTags.
1274 # Don't reject binary uploads
1275 if not self.pkg.changes['architecture'].has_key('source'):
1278 # Only check some distributions
1279 for dist in ('unstable', 'experimental'):
1280 if dist in self.pkg.changes['distribution']:
1285 # If we do not have a tagfile, don't do anything
1286 tagfile = cnf.get("Dinstall::LintianTags")
1290 # Parse the yaml file
1291 sourcefile = file(tagfile, 'r')
1292 sourcecontent = sourcefile.read()
1296 lintiantags = yaml.load(sourcecontent)['lintian']
1297 except yaml.YAMLError, msg:
1298 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1301 # Try and find all orig mentioned in the .dsc
1302 symlinked = self.ensure_orig()
1304 # Setup the input file for lintian
1305 fd, temp_filename = utils.temp_filename()
1306 temptagfile = os.fdopen(fd, 'w')
1307 for tags in lintiantags.values():
1308 temptagfile.writelines(['%s\n' % x for x in tags])
1312 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1313 (temp_filename, self.pkg.changes_file)
1315 result, output = commands.getstatusoutput(cmd)
1317 # Remove our tempfile and any symlinks we created
1318 os.unlink(temp_filename)
1320 for symlink in symlinked:
1324 utils.warn("lintian failed for %s [return code: %s]." % \
1325 (self.pkg.changes_file, result))
1326 utils.warn(utils.prefix_multi_line_string(output, \
1327 " [possible output:] "))
1332 [self.pkg.changes_file, "check_lintian"] + list(txt)
1336 parsed_tags = parse_lintian_output(output)
1337 self.rejects.extend(
1338 generate_reject_messages(parsed_tags, lintiantags, log=log)
1341 ###########################################################################
1342 def check_urgency(self):
1344 if self.pkg.changes["architecture"].has_key("source"):
1345 if not self.pkg.changes.has_key("urgency"):
1346 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1347 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1348 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1349 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1350 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1351 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1353 ###########################################################################
1355 # Sanity check the time stamps of files inside debs.
1356 # [Files in the near future cause ugly warnings and extreme time
1357 # travel can cause errors on extraction]
1359 def check_timestamps(self):
1362 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1363 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1364 tar = TarTime(future_cutoff, past_cutoff)
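# Editor's note (illustrative): with a PastCutoffYear of "1975",
# time.mktime(time.strptime("1975", "%Y")) is the epoch value for
# 1975-01-01 00:00 local time; files older than that are "ancient",
# files newer than now + FutureTimeTravelGrace are "from the future".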
1366 for filename, entry in self.pkg.files.items():
1367 if entry["type"] == "deb":
1370 deb_file = utils.open_file(filename)
1371 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1374 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1375 except SystemError, e:
1376 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1377 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1380 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1384 future_files = tar.future_files.keys()
1386 num_future_files = len(future_files)
1387 future_file = future_files[0]
1388 future_date = tar.future_files[future_file]
1389 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1390 % (filename, num_future_files, future_file, time.ctime(future_date)))
1392 ancient_files = tar.ancient_files.keys()
1394 num_ancient_files = len(ancient_files)
1395 ancient_file = ancient_files[0]
1396 ancient_date = tar.ancient_files[ancient_file]
1397 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1398 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1400 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1402 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1403 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1405 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1411 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1412 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1413 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1414 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1415 self.pkg.changes["sponsoremail"] = uid_email
1420 ###########################################################################
1421 # check_signed_by_key checks
1422 ###########################################################################
1424 def check_signed_by_key(self):
1425 """Ensure the .changes is signed by an authorized uploader."""
1426 session = DBConn().session()
1428 # First of all we check that the person has proper upload permissions
1429 # and that this upload isn't blocked
1430 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1433 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1436 # TODO: Check that import-keyring adds UIDs properly
1438 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1441 # Check that the fingerprint which uploaded has permission to do so
1442 self.check_upload_permissions(fpr, session)
1444 # Check that this package is not in a transition
1445 self.check_transition(session)
1450 def check_upload_permissions(self, fpr, session):
1451 # Check any one-off upload blocks
1452 self.check_upload_blocks(fpr, session)
1454 # DM is a special case unfortunately, so we check it first
1456 # (keys with no source access get more access than DMs in one
1457 # way; DMs can only upload for their packages whether source
1458 # or binary, whereas keys with no access might be able to
1459 # upload some binaries)
1460 if fpr.source_acl.access_level == 'dm':
1461 self.check_dm_upload(fpr, session)
1463 # Check source-based permissions for other types
1464 if self.pkg.changes["architecture"].has_key("source") and \
1465 fpr.source_acl.access_level is None:
1466 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1467 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1468 self.rejects.append(rej)
1470 # If not a DM, we allow full upload rights
1471 uid_email = "%s@debian.org" % (fpr.uid.uid)
1472 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1475 # Check binary upload permissions
1476 # By this point we know that DMs can't have got here unless they
1477 # are allowed to deal with the package concerned so just apply
1479 if fpr.binary_acl.access_level == 'full':
1482 # Otherwise we're in the map case
1483 tmparches = self.pkg.changes["architecture"].copy()
1484 tmparches.pop('source', None)
1486 for bam in fpr.binary_acl_map:
1487 tmparches.pop(bam.architecture.arch_string, None)
1489 if len(tmparches.keys()) > 0:
1490 if fpr.binary_reject:
1491 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1492 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1493 self.rejects.append(rej)
1495 # TODO: This is where we'll implement reject vs throw away binaries later
1496 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1497 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1498 rej += "\nFingerprint: %s", (fpr.fingerprint)
1499 self.rejects.append(rej)
1502 def check_upload_blocks(self, fpr, session):
1503 """Check whether any upload blocks apply to this source, source
1504 version, uid / fpr combination"""
1506 def block_rej_template(fb):
1507 rej = 'Manual upload block in place for package %s' % fb.source
1508 if fb.version is not None:
1509 rej += ', version %s' % fb.version
1512 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1513 # version is None if the block applies to all versions
1514 if fb.version is None or fb.version == self.pkg.changes['version']:
1515 # Check both fpr and uid - either is enough to cause a reject
1516 if fb.fpr is not None:
1517 if fb.fpr.fingerprint == fpr.fingerprint:
1518 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1519 if fb.uid is not None:
1520 if fb.uid == fpr.uid:
1521 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1524 def check_dm_upload(self, fpr, session):
1525 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1526 ## none of the uploaded packages are NEW
1528 for f in self.pkg.files.keys():
1529 if self.pkg.files[f].has_key("byhand"):
1530 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1532 if self.pkg.files[f].has_key("new"):
1533 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1539 ## the most recent version of the package uploaded to unstable or
1540 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1541 ## section of its control file
1542 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1543 q = q.join(SrcAssociation)
1544 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1545 q = q.order_by(desc('source.version')).limit(1)
1550 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1551 self.rejects.append(rej)
1555 if not r.dm_upload_allowed:
1556 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1557 self.rejects.append(rej)
1560 ## the Maintainer: field of the uploaded .changes file corresponds with
1561 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1563 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1564 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1566 ## the most recent version of the package uploaded to unstable or
1567 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1568 ## non-developer maintainers cannot NMU or hijack packages)
1570 # srcuploaders includes the maintainer
1572 for sup in r.srcuploaders:
1573 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1574 # Eww - I hope we never have two people with the same name in Debian
1575 if email == fpr.uid.uid or name == fpr.uid.name:
1580 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1583 ## none of the packages are being taken over from other source packages
1584 for b in self.pkg.changes["binary"].keys():
1585 for suite in self.pkg.changes["distribution"].keys():
1586 q = session.query(DBSource)
1587 q = q.join(DBBinary).filter_by(package=b)
1588 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1591 if s.source != self.pkg.changes["source"]:
1592 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1596 def check_transition(self, session):
1599 sourcepkg = self.pkg.changes["source"]
1601 # No sourceful upload -> no need to do anything else, direct return
1602 # We also work with unstable uploads, not experimental or those going to some
1603 # proposed-updates queue
1604 if "source" not in self.pkg.changes["architecture"] or \
1605 "unstable" not in self.pkg.changes["distribution"]:
1608 # Also only check if there is a file defined (and existent) with
1610 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1611 if transpath == "" or not os.path.exists(transpath):
1614 # Parse the yaml file
1615 sourcefile = file(transpath, 'r')
1616 sourcecontent = sourcefile.read()
1618 transitions = yaml.load(sourcecontent)
1619 except yaml.YAMLError, msg:
1620 # This shouldn't happen, there is a wrapper to edit the file which
1621 # checks it, but we prefer to be safe rather than end up rejecting everything.
1623 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1626 # Now look through all defined transitions
1627 for trans in transitions:
1628 t = transitions[trans]
1629 source = t["source"]
1632 # Will be None if nothing is in testing.
1633 current = get_source_in_suite(source, "testing", session)
1634 if current is not None:
1635 compare = apt_pkg.VersionCompare(current.version, expected)
1637 if current is None or compare < 0:
1638 # This is still valid, the current version in testing is older than
1639 # the new version we wait for, or there is none in testing yet
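# Editor's note (illustrative): apt_pkg.VersionCompare(a, b) returns a
# negative, zero or positive integer as a sorts before, equal to or
# after b, so compare < 0 means testing still carries something older
# than the version this transition is waiting for.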
1641 # Check if the source we look at is affected by this.
1642 if sourcepkg in t['packages']:
1643 # The source is affected, let's reject it.
1645 rejectmsg = "%s: part of the %s transition.\n\n" % (
1648 if current is not None:
1649 currentlymsg = "at version %s" % (current.version)
1651 currentlymsg = "not present in testing"
1653 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1655 rejectmsg += "\n".join(textwrap.wrap("""Your package
1656 is part of a testing transition designed to get %s migrated (it is
1657 currently %s, we need version %s). This transition is managed by the
1658 Release Team, and %s is the Release-Team member responsible for it.
1659 Please mail debian-release@lists.debian.org or contact %s directly if you
1660 need further assistance. You might want to upload to experimental until this
1661 transition is done."""
1662 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1664 self.rejects.append(rejectmsg)
1667 ###########################################################################
1668 # End check_signed_by_key checks
1669 ###########################################################################
1671 def build_summaries(self):
1672 """ Build a summary of changes the upload introduces. """
1674 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1676 short_summary = summary
1678 # This is for direport's benefit...
1679 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1682 summary += "Changes: " + f
1684 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1686 summary += self.announce(short_summary, 0)
1688 return (summary, short_summary)
1690 ###########################################################################
1692 def close_bugs(self, summary, action):
1694 Send mail to close bugs as instructed by the closes field in the changes file.
1695 Also add a line to summary if any work was done.
1697 @type summary: string
1698 @param summary: summary text, as given by L{build_summaries}
1701 @param action: If set to false, no real action will be taken.
1704 @return: summary. If action was taken, extended by the list of closed bugs.
1708 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1710 bugs = self.pkg.changes["closes"].keys()
1716 summary += "Closing bugs: "
1718 summary += "%s " % (bug)
1721 self.Subst["__BUG_NUMBER__"] = bug
1722 if self.pkg.changes["distribution"].has_key("stable"):
1723 self.Subst["__STABLE_WARNING__"] = """
1724 Note that this package is not part of the released stable Debian
1725 distribution. It may have dependencies on other unreleased software,
1726 or other instabilities. Please take care if you wish to install it.
1727 The update will eventually make its way into the next released Debian
distribution."""
else:
1730 self.Subst["__STABLE_WARNING__"] = ""
1731 mail_message = utils.TemplateSubst(self.Subst, template)
1732 utils.send_mail(mail_message)
1734 # Clear up after ourselves
1735 del self.Subst["__BUG_NUMBER__"]
1736 del self.Subst["__STABLE_WARNING__"]
1738 if action and self.logger:
1739 self.logger.log(["closing bugs"] + bugs)
summary += "\n"
return summary
1745 ###########################################################################
1747 def announce(self, short_summary, action):
"""
1749 Send an announce mail about a new upload.
1751 @type short_summary: string
1752 @param short_summary: Short summary text to include in the mail
@type action: bool
1755 @param action: if False, no real action will be taken
@rtype: string
1758 @return: text describing the action taken
"""
cnf = Config()
1763 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
lists_done = {}
summary = ""
1765 # Only do announcements for source uploads with a recent dpkg-dev installed
1766 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1767 self.pkg.changes["architecture"].has_key("source"):
return ""
1773 self.Subst["__SHORT_SUMMARY__"] = short_summary
1775 for dist in self.pkg.changes["distribution"].keys():
1776 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1777 if announce_list == "" or lists_done.has_key(announce_list):
continue
1780 lists_done[announce_list] = 1
1781 summary += "Announcing to %s\n" % (announce_list)
if action:
1785 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1786 if cnf.get("Dinstall::TrackingServer") and \
1787 self.pkg.changes["architecture"].has_key("source"):
1788 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1789 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1791 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1792 utils.send_mail(mail_message)
1794 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1796 if cnf.FindB("Dinstall::CloseBugs"):
1797 summary = self.close_bugs(summary, action)
1799 del self.Subst["__SHORT_SUMMARY__"]
return summary
1803 ###########################################################################
1805 def accept(self, summary, short_summary, session=None):
"""
1809 This moves all files referenced from the .changes into the pool,
1810 sends the accepted mail, announces to lists, closes bugs and
1811 also checks for override disparities. If enabled it will write out
1812 the version history for the BTS Version Tracking and will finally call
L{queue_build}.
1815 @type summary: string
1816 @param summary: Summary text
1818 @type short_summary: string
1819 @param short_summary: Short summary
"""
cnf = Config()
1823 stats = SummaryStats()
1826 self.logger.log(["installing changes", self.pkg.changes_file])
poolfiles = []
1830 # Add the .dsc file to the DB first
1831 for newfile, entry in self.pkg.files.items():
1832 if entry["type"] == "dsc":
1833 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
for j in pfs:
poolfiles.append(j)
1837 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1838 for newfile, entry in self.pkg.files.items():
1839 if entry["type"] == "deb":
1840 poolfiles.append(add_deb_to_db(self, newfile, session))
1842 # If this is a sourceful diff only upload that is moving
1843 # cross-component we need to copy the .orig files into the new
1844 # component too for the same reasons as above.
1845 # XXX: mhy: I think this should be in add_dsc_to_db
1846 if self.pkg.changes["architecture"].has_key("source"):
1847 for orig_file in self.pkg.orig_files.keys():
1848 if not self.pkg.orig_files[orig_file].has_key("id"):
1849 continue # Skip if it's not in the pool
1850 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1851 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1852 continue # Skip if the location didn't change
1855 oldf = get_poolfile_by_id(orig_file_id, session)
1856 old_filename = os.path.join(oldf.location.path, oldf.filename)
1857 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1858 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1860 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
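# Illustrative sketch (not in the original): utils.poolify maps a source
# package and component to its pool subdirectory, roughly
#   poolify("hello", "main")  -> "pool/main/h/hello/"
#   poolify("libfoo", "main") -> "pool/main/libf/libfoo/"
# so the copied .orig ends up under the new component's pool tree.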
1862 # TODO: Care about size/md5sum collisions etc
1863 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1865 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
if newf is None:
1867 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1868 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1872 # Don't reference the old file from this changes
for p in poolfiles:
1874 if p.file_id == oldf.file_id:
poolfiles.remove(p)
1877 poolfiles.append(newf)
1879 # Fix up the DSC references
toremove = []
1882 for df in source.srcfiles:
1883 if df.poolfile.file_id == oldf.file_id:
1884 # Add a new DSC entry and mark the old one for deletion
1885 # Don't do it in the loop so we don't change the thing we're iterating over
newdscf = DSCFile()
1887 newdscf.source_id = source.source_id
1888 newdscf.poolfile_id = newf.file_id
1889 session.add(newdscf)
toremove.append(df)
for df in toremove:
session.delete(df)
1899 # Make sure that our source object is up-to-date
1900 session.expire(source)
1902 # Add changelog information to the database
1903 self.store_changelog()
1905 # Install the files into the pool
1906 for newfile, entry in self.pkg.files.items():
1907 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1908 utils.move(newfile, destination)
1909 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1910 stats.accept_bytes += float(entry["size"])
1912 # Copy the .changes file across for suites which need it.
copy_changes = {}
1914 for suite_name in self.pkg.changes["distribution"].keys():
1915 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1916 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1918 for dest in copy_changes.keys():
1919 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1921 # We're done - commit the database changes
session.commit()
1923 # Our SQL session will automatically start a new transaction after
# the last commit
1926 # Move the .changes into the 'done' directory
1927 utils.move(self.pkg.changes_file,
1928 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1930 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1931 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1934 self.Subst["__SUITE__"] = ""
1935 self.Subst["__SUMMARY__"] = summary
1936 mail_message = utils.TemplateSubst(self.Subst,
1937 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1938 utils.send_mail(mail_message)
1939 self.announce(short_summary, 1)
1941 ## Helper stuff for DebBugs Version Tracking
1942 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1943 if self.pkg.changes["architecture"].has_key("source"):
1944 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1945 version_history = os.fdopen(fd, 'w')
1946 version_history.write(self.pkg.dsc["bts changelog"])
1947 version_history.close()
1948 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1949 self.pkg.changes_file[:-8]+".versions")
1950 os.rename(temp_filename, filename)
1951 os.chmod(filename, 0644)
1953 # Write out the binary -> source mapping.
1954 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1955 debinfo = os.fdopen(fd, 'w')
1956 for name, entry in sorted(self.pkg.files.items()):
1957 if entry["type"] == "deb":
1958 line = " ".join([entry["package"], entry["version"],
1959 entry["architecture"], entry["source package"],
1960 entry["source version"]])
1961 debinfo.write(line+"\n")
1963 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1964 self.pkg.changes_file[:-8]+".debinfo")
1965 os.rename(temp_filename, filename)
1966 os.chmod(filename, 0644)
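# Illustrative example (not in the original): each line of the .debinfo
# file maps a binary to its source, e.g.
#   hello 2.4-1 amd64 hello 2.4-1
# i.e. package, version, architecture, source package, source version.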
1970 # Set up our copy queues (e.g. buildd queues)
1971 for suite_name in self.pkg.changes["distribution"].keys():
1972 suite = get_suite(suite_name, session)
1973 for q in suite.copy_queues:
for f in poolfiles:
1975 q.add_file_from_pool(f)
session.commit()
1980 stats.accept_count += 1
1982 def check_override(self):
"""
1984 Checks override entries for validity. Mails "Override disparity" warnings
1985 if that feature is enabled.
1987 Abandons the check if
1988 - override disparity checks are disabled
1989 - mail sending is disabled
"""
cnf = Config()
1994 # Abandon the check if override disparity checks have been disabled
1995 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
return
1998 summary = self.pkg.check_override()
if summary == "":
return
2003 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2006 self.Subst["__SUMMARY__"] = summary
2007 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2008 utils.send_mail(mail_message)
2009 del self.Subst["__SUMMARY__"]
2011 ###########################################################################
2013 def remove(self, from_dir=None):
"""
2015 Used (for instance) in p-u to remove the package from unchecked.
2017 Also removes the package from the holding area.
"""
h = Holding()
2019 if from_dir is None:
2020 from_dir = self.pkg.directory
2023 for f in self.pkg.files.keys():
2024 os.unlink(os.path.join(from_dir, f))
2025 if os.path.exists(os.path.join(h.holding_dir, f)):
2026 os.unlink(os.path.join(h.holding_dir, f))
2028 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2029 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2030 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2032 ###########################################################################
2034 def move_to_queue(self, queue):
"""
2036 Move files to a destination queue using the permissions in the table.
"""
h = Holding()
2039 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2040 queue.path, perms=int(queue.change_perms, 8))
2041 for f in self.pkg.files.keys():
2042 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
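# (Aside, not in the original: queue.perms and queue.change_perms are
# octal permission strings such as "0660", hence the int(..., 8)
# conversion above; int("0660", 8) == 432, i.e. rw-rw----.)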
2044 ###########################################################################
2046 def force_reject(self, reject_files):
"""
2048 Forcefully move files from the current directory to the
2049 reject directory. If any file already exists in the reject
2050 directory it will be moved to the morgue to make way for
the new file.
2053 @type reject_files: dict
2054 @param reject_files: file dictionary
"""
cnf = Config()
2060 for file_entry in reject_files:
2061 # Skip any files which don't exist or which we don't have permission to copy.
2062 if os.access(file_entry, os.R_OK) == 0:
continue
2065 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
try:
2068 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
except OSError, e:
2070 # File exists? Let's find a new name by adding a number
2071 if e.errno == errno.EEXIST:
try:
2073 dest_file = utils.find_next_free(dest_file, 255)
2074 except NoFreeFilenameError:
2075 # Something's either gone badly Pete Tong, or
2076 # someone is trying to exploit us.
2077 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
return
2080 # Make sure we really got it
try:
2082 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
except OSError:
2085 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
return
2089 # If we got here, we own the destination file, so we can
2090 # safely overwrite it.
2091 utils.move(file_entry, dest_file, 1, perms=0660)
os.close(dest_fd)
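# (Aside, not in the original: os.open() with O_CREAT|O_EXCL fails with
# EEXIST if the name is already taken, so the first process to create the
# file owns it; that is what makes claiming the reject filename race-free.)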
2094 ###########################################################################
2095 def do_reject(self, manual=0, reject_message="", notes=""):
"""
2097 Reject an upload. If called without a reject message or C{manual} is
2098 true, spawn an editor so the user can write one.
@type manual: bool
2101 @param manual: manual or automated rejection
2103 @type reject_message: string
2104 @param reject_message: A reject message
"""
2109 # If we weren't given a manual rejection message, spawn an
2110 # editor so the user can add one in...
2111 if manual and not reject_message:
2112 (fd, temp_filename) = utils.temp_filename()
2113 temp_file = os.fdopen(fd, 'w')
if len(notes) > 0:
for note in notes:
2116 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2117 % (note.author, note.version, note.notedate, note.comment))
temp_file.close()
2119 editor = os.environ.get("EDITOR", "vi")
answer = 'E'
2121 while answer == 'E':
2122 os.system("%s %s" % (editor, temp_filename))
2123 temp_fh = utils.open_file(temp_filename)
2124 reject_message = "".join(temp_fh.readlines())
temp_fh.close()
2126 print "Reject message:"
2127 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2128 prompt = "[R]eject, Edit, Abandon, Quit ?"
answer = "XXX"
2130 while prompt.find(answer) == -1:
2131 answer = utils.our_raw_input(prompt)
2132 m = re_default_answer.search(prompt)
if answer == "":
answer = m.group(1)
2135 answer = answer[:1].upper()
2136 os.unlink(temp_filename)
if answer == 'A':
return 1
elif answer == 'Q':
sys.exit(0)
2142 print "Rejecting.\n"
cnf = Config()
2146 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2147 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2149 # Move all the files into the reject directory
2150 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2151 self.force_reject(reject_files)
2153 # If we fail here someone is probably trying to exploit the race
2154 # so let's just raise an exception ...
2155 if os.path.exists(reason_filename):
2156 os.unlink(reason_filename)
2157 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2159 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
if not manual:
2163 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2164 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2165 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2166 os.write(reason_fd, reject_message)
2167 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
else:
2169 # Build up the rejection email
2170 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2171 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2172 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2173 self.Subst["__REJECT_MESSAGE__"] = ""
2174 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2175 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2176 # Write the rejection email out as the <foo>.reason file
2177 os.write(reason_fd, reject_mail_message)
2179 del self.Subst["__REJECTOR_ADDRESS__"]
2180 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2181 del self.Subst["__CC__"]
os.close(reason_fd)
2185 # Send the rejection mail
2186 utils.send_mail(reject_mail_message)
if self.logger:
2189 self.logger.log(["rejected", self.pkg.changes_file])
return 0
2193 ################################################################################
2194 def in_override_p(self, package, component, suite, binary_type, filename, session):
"""
2196 Check if a package already has override entries in the DB
2198 @type package: string
2199 @param package: package name
2201 @type component: string
2202 @param component: database id of the component
@type suite: int
2205 @param suite: database id of the suite
2207 @type binary_type: string
2208 @param binary_type: type of the package
2210 @type filename: string
2211 @param filename: filename we check
2213 @return: the database result. But no one cares anyway.
"""
cnf = Config()
2219 if binary_type == "": # must be source
file_type = "dsc"
else:
2222 file_type = binary_type
2224 # Override suite name; used for example with proposed-updates
2225 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2226 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2228 result = get_override(package, suite, component, file_type, session)
2230 # If checking for a source package fall back on the binary override type
2231 if file_type == "dsc" and len(result) < 1:
2232 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2234 # Remember the section and priority so we can check them later if appropriate
if len(result) > 0:
result = result[0]
2237 self.pkg.files[filename]["override section"] = result.section.section
2238 self.pkg.files[filename]["override priority"] = result.priority.priority
return result
return None
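# Illustrative call (hypothetical values, not in the original):
#   override = self.in_override_p("hello", "main", "unstable", "deb",
#                                 "hello_2.4-1_amd64.deb", session)
#   # override is the matching override row, or None if the file is NEW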
2243 ################################################################################
2244 def get_anyversion(self, sv_list, suite):
"""
@type sv_list: list
2247 @param sv_list: list of (suite, version) tuples to check
@type suite: string
2250 @param suite: suite name
@rtype: string
@return: the highest version found for the suite (and the suites it enhances), or None
"""
Cnf = Config()
anyversion = None
2256 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2257 for (s, v) in sv_list:
2258 if s in [ x.lower() for x in anysuite ]:
2259 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
anyversion = v
return anyversion
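# Illustrative example (hypothetical values, not in the original): with
#   sv_list = [("unstable", "1.0-1"), ("unstable", "1.2-1"), ("experimental", "2.0-1")]
# and no Enhances entry configured for unstable,
#   get_anyversion(sv_list, "unstable") == "1.2-1"
# i.e. the highest version present in the suite itself.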
2264 ################################################################################
2266 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
"""
@type sv_list: list
2269 @param sv_list: list of (suite, version) tuples to check
2271 @type filename: string
2272 @param filename: XXX
2274 @type new_version: string
2275 @param new_version: XXX
2277 Ensure versions are newer than existing packages in target
2278 suites and that cross-suite version checking rules as
2279 set out in the conf file are satisfied.
"""
cnf = Config()
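# Illustrative configuration sketch (not in the original; the exact
# syntax depends on the local dak.conf): a suite stanza can carry e.g.
#   VersionChecks { MustBeNewerThan { Stable; Testing; };
#                   MustBeOlderThan { Experimental; }; };
# which is what the ValueList() lookups below read.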
2284 # Check versions for each target suite
2285 for target_suite in self.pkg.changes["distribution"].keys():
2286 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2287 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2289 # Enforce "must be newer than target suite" even if conffile omits it
2290 if target_suite not in must_be_newer_than:
2291 must_be_newer_than.append(target_suite)
2293 for (suite, existent_version) in sv_list:
2294 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2296 if suite in must_be_newer_than and sourceful and vercmp < 1:
2297 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2299 if suite in must_be_older_than and vercmp > -1:
cansave = 0
2302 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2303 # we really use the other suite, ignoring the conflicting one ...
2304 addsuite = self.pkg.changes["distribution-version"][suite]
2306 add_version = self.get_anyversion(sv_list, addsuite)
2307 target_version = self.get_anyversion(sv_list, target_suite)
if not add_version:
2310 # not add_version can only happen if we map to a suite
2311 # that doesn't enhance the suite we're propup'ing from.
2312 # so "propup-ver x a b c; map a d" is a problem only if
2313 # d doesn't enhance a.
2315 # i think we could always propagate in this case, rather
2316 # than complaining. either way, this isn't a REJECT issue
2318 # And - we really should complain to the dorks who configured dak
2319 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2320 self.pkg.changes.setdefault("propdistribution", {})
2321 self.pkg.changes["propdistribution"][addsuite] = 1
cansave = 1
2323 elif not target_version:
2324 # not target_version is true when the package is NEW
2325 # we could just stick with the "...old version..." REJECT
2326 # for this, I think.
2327 self.rejects.append("Won't propagate NEW packages.")
2328 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2329 # propagation would be redundant. no need to reject though.
2330 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
cansave = 1
2332 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2333 apt_pkg.VersionCompare(add_version, target_version) >= 0:
# propagate!
2335 self.warnings.append("Propagating upload to %s" % (addsuite))
2336 self.pkg.changes.setdefault("propdistribution", {})
2337 self.pkg.changes["propdistribution"][addsuite] = 1
cansave = 1
if not cansave:
2341 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2343 ################################################################################
2344 def check_binary_against_db(self, filename, session):
2345 # Ensure version is sane
2346 q = session.query(BinAssociation)
2347 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2348 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2350 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2351 filename, self.pkg.files[filename]["version"], sourceful=False)
2353 # Check for any existing copies of the file
2354 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2355 q = q.filter_by(version=self.pkg.files[filename]["version"])
2356 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
if q.count() > 0:
2359 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2361 ################################################################################
2363 def check_source_against_db(self, filename, session):
2364 source = self.pkg.dsc.get("source")
2365 version = self.pkg.dsc.get("version")
2367 # Ensure version is sane
2368 q = session.query(SrcAssociation)
2369 q = q.join(DBSource).filter(DBSource.source==source)
2371 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2372 filename, version, sourceful=True)
2374 ################################################################################
2375 def check_dsc_against_db(self, filename, session):
"""
2378 @warning: NB: this function can remove entries from the 'files' index [if
2379 the orig tarball is a duplicate of the one in the archive]; if
2380 you're iterating over 'files' and call this function as part of
2381 the loop, be sure to add a check to the top of the loop to
2382 ensure you haven't just tried to dereference the deleted entry.
"""
Cnf = Config()
2387 self.pkg.orig_files = {} # XXX: do we need to clear it?
2388 orig_files = self.pkg.orig_files
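# Illustrative caller-side guard (not in the original), as used by
# recheck() below: when iterating self.pkg.files and calling this
# function inside the loop, re-check each key first, e.g.
#   for f in self.pkg.files.keys():
#       if not self.pkg.files.has_key(f):
#           continue  # entry was deleted by check_dsc_against_db()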
2390 # Try and find all files mentioned in the .dsc. This has
2391 # to work harder to cope with the multiple possible
2392 # locations of an .orig.tar.gz.
2393 # The ordering on the select is needed to pick the newest orig
2394 # when it exists in multiple places.
2395 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
found = None
2397 if self.pkg.files.has_key(dsc_name):
2398 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2399 actual_size = int(self.pkg.files[dsc_name]["size"])
2400 found = "%s in incoming" % (dsc_name)
2402 # Check the file does not already exist in the archive
2403 ql = get_poolfile_like_name(dsc_name, session)
2405 # Strip out anything that isn't '%s' or '/%s$'
for i in ql[:]: # iterate over a copy since we remove entries
2407 if not i.filename.endswith(dsc_name):
ql.remove(i)
2410 # "[dak] has not broken them. [dak] has fixed a
2411 # brokenness. Your crappy hack exploited a bug in
2414 # "(Come on! I thought it was always obvious that
2415 # one just doesn't release different files with
2416 # the same name and version.)"
2417 # -- ajk@ on d-devel@l.d.o
2420 # Ignore exact matches for .orig.tar.gz
match = 0
2422 if re_is_orig_source.match(dsc_name):
for i in ql:
2424 if self.pkg.files.has_key(dsc_name) and \
2425 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2426 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2427 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2428 # TODO: Don't delete the entry, just mark it as not needed
2429 # This would fix the stupidity of changing something we often iterate over
2430 # whilst we're doing it
2431 del self.pkg.files[dsc_name]
2432 dsc_entry["files id"] = i.file_id
2433 if not orig_files.has_key(dsc_name):
2434 orig_files[dsc_name] = {}
2435 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
match = 1
2438 # Don't bitch that we couldn't find this file later
try:
2440 self.later_check_files.remove(dsc_name)
except ValueError:
pass
if not match:
2446 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2448 elif re_is_orig_source.match(dsc_name):
2450 ql = get_poolfile_like_name(dsc_name, session)
2452 # Strip out anything that isn't '%s' or '/%s$'
2453 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
for i in ql[:]: # iterate over a copy since we remove entries
2455 if not i.filename.endswith(dsc_name):
ql.remove(i)
if len(ql) > 0:
2459 # Unfortunately, we may get more than one match here if,
2460 # for example, the package was in potato but had an -sa
2461 # upload in woody. So we need to choose the right one.
2463 # default to something sane in case we don't match any or have only one
x = ql[0]
if len(ql) > 1:
for i in ql:
2468 old_file = os.path.join(i.location.path, i.filename)
2469 old_file_fh = utils.open_file(old_file)
2470 actual_md5 = apt_pkg.md5sum(old_file_fh)
old_file_fh.close()
2472 actual_size = os.stat(old_file)[stat.ST_SIZE]
2473 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
x = i
2476 old_file = os.path.join(x.location.path, x.filename)
2477 old_file_fh = utils.open_file(old_file)
2478 actual_md5 = apt_pkg.md5sum(old_file_fh)
old_file_fh.close()
2480 actual_size = os.stat(old_file)[stat.ST_SIZE]
found = old_file
2482 suite_type = x.location.archive_type
2483 # need this for updating dsc_files in install()
2484 dsc_entry["files id"] = x.file_id
2485 # See install() in process-accepted...
2486 if not orig_files.has_key(dsc_name):
2487 orig_files[dsc_name] = {}
2488 orig_files[dsc_name]["id"] = x.file_id
2489 orig_files[dsc_name]["path"] = old_file
2490 orig_files[dsc_name]["location"] = x.location.location_id
else:
2492 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2493 # Not there? Check the queue directories...
2494 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2495 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
continue
2497 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2498 if os.path.exists(in_otherdir):
2499 in_otherdir_fh = utils.open_file(in_otherdir)
2500 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2501 in_otherdir_fh.close()
2502 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
found = in_otherdir
2504 if not orig_files.has_key(dsc_name):
2505 orig_files[dsc_name] = {}
2506 orig_files[dsc_name]["path"] = in_otherdir
if not found:
2509 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
continue
else:
2512 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
continue
2514 if actual_md5 != dsc_entry["md5sum"]:
2515 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2516 if actual_size != int(dsc_entry["size"]):
2517 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2519 ################################################################################
2520 # This is used by process-new and process-holding to recheck a changes file
2521 # at the time we're running. It mainly wraps various other internal functions
2522 # and is similar to accepted_checks - these should probably be tidied up
2524 def recheck(self, session):
cnf = Config()
2526 for f in self.pkg.files.keys():
2527 # The .orig.tar.gz can disappear out from under us if it's a
2528 # duplicate of one in the archive.
2529 if not self.pkg.files.has_key(f):
continue
2532 entry = self.pkg.files[f]
2534 # Check that the source still exists
2535 if entry["type"] == "deb":
2536 source_version = entry["source version"]
2537 source_package = entry["source package"]
2538 if not self.pkg.changes["architecture"].has_key("source") \
2539 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2540 source_epochless_version = re_no_epoch.sub('', source_version)
2541 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
found = False
2543 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2544 if cnf.has_key("Dir::Queue::%s" % (q)):
2545 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
found = True
if not found:
2548 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2550 # Version and file overwrite checks
2551 if entry["type"] == "deb":
2552 self.check_binary_against_db(f, session)
2553 elif entry["type"] == "dsc":
2554 self.check_source_against_db(f, session)
2555 self.check_dsc_against_db(f, session)
2557 ################################################################################
2558 def accepted_checks(self, overwrite_checks, session):
2559 # Recheck anything that relies on the database; since that's not
2560 # frozen between accept and our run time when called from p-a.
2562 # overwrite_checks is set to False when installing to stable/oldstable
propagate = {}
nopropagate = {}
2567 # Find the .dsc (again)
dsc_filename = None
2569 for f in self.pkg.files.keys():
2570 if self.pkg.files[f]["type"] == "dsc":
dsc_filename = f
2573 for checkfile in self.pkg.files.keys():
2574 # The .orig.tar.gz can disappear out from under us if it's a
2575 # duplicate of one in the archive.
2576 if not self.pkg.files.has_key(checkfile):
continue
2579 entry = self.pkg.files[checkfile]
2581 # Check that the source still exists
2582 if entry["type"] == "deb":
2583 source_version = entry["source version"]
2584 source_package = entry["source package"]
2585 if not self.pkg.changes["architecture"].has_key("source") \
2586 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2587 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2589 # Version and file overwrite checks
2590 if overwrite_checks:
2591 if entry["type"] == "deb":
2592 self.check_binary_against_db(checkfile, session)
2593 elif entry["type"] == "dsc":
2594 self.check_source_against_db(checkfile, session)
2595 self.check_dsc_against_db(dsc_filename, session)
2597 # propagate in the case it is in the override tables:
2598 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2599 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2600 propagate[suite] = 1
else:
2602 nopropagate[suite] = 1
2604 for suite in propagate.keys():
2605 if suite in nopropagate:
continue
2607 self.pkg.changes["distribution"][suite] = 1
2609 for checkfile in self.pkg.files.keys():
entry = self.pkg.files[checkfile]
2610 # Check the package is still in the override tables
2611 for suite in self.pkg.changes["distribution"].keys():
2612 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2613 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2615 ################################################################################
2616 # This is not really a reject, but an unaccept; since a) the code for
2617 # that is non-trivial (reopen bugs, unannounce etc.) and b) this should be
2618 # extremely rare, for now we'll go with whining at our admin folks...
2620 def do_unaccept(self):
cnf = Config()
2624 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2625 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2626 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2627 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2628 if cnf.has_key("Dinstall::Bcc"):
2629 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2631 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2633 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2635 # Write the rejection email out as the <foo>.reason file
2636 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2637 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2639 # If we fail here someone is probably trying to exploit the race
2640 # so let's just raise an exception ...
2641 if os.path.exists(reject_filename):
2642 os.unlink(reject_filename)
2644 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2645 os.write(fd, reject_mail_message)
os.close(fd)
2648 utils.send_mail(reject_mail_message)
2650 del self.Subst["__REJECTOR_ADDRESS__"]
2651 del self.Subst["__REJECT_MESSAGE__"]
2652 del self.Subst["__CC__"]
2654 ################################################################################
2655 # If any file of an upload has a recent mtime then chances are good
2656 # the file is still being uploaded.
2658 def upload_too_new(self):
cnf = Config()
too_new = False
2661 # Move back to the original directory to get accurate time stamps
cwd = os.getcwd()
2663 os.chdir(self.pkg.directory)
2664 file_list = self.pkg.files.keys()
2665 file_list.extend(self.pkg.dsc_files.keys())
2666 file_list.append(self.pkg.changes_file)
for f in file_list:
try:
2669 last_modified = time.time() - os.path.getmtime(f)
2670 if last_modified < int(cnf["Dinstall::SkipTime"]):
too_new = True
break
except OSError:
pass
os.chdir(cwd)
return too_new
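# (Aside, not in the original: with e.g. Dinstall::SkipTime set to 300,
# any file modified less than 300 seconds ago makes upload_too_new()
# return True, so the upload is skipped until a later run finds it settled.)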
2679 def store_changelog(self):
2681 # Skip binary-only upload if it is not a bin-NMU
2682 if not self.pkg.changes['architecture'].has_key('source'):
2683 from daklib.regexes import re_bin_only_nmu
2684 if not re_bin_only_nmu.search(self.pkg.changes['version']):
return
2687 session = DBConn().session()
2689 # Check if upload already has a changelog entry
2690 query = """SELECT changelog_id FROM changes WHERE source = :source
2691 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2692 if session.execute(query, {'source': self.pkg.changes['source'], \
2693 'version': self.pkg.changes['version'], \
2694 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
return
2698 # Add current changelog text into changelogs_text table, return created ID
2699 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
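# (Aside, not in the original: the RETURNING clause is PostgreSQL-specific
# SQL; dak's database is PostgreSQL, so it can hand back the new id
# without a second SELECT.)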
2700 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2702 # Link ID to the upload available in changes table
2703 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2704 AND version = :version AND architecture = :architecture"""
2705 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2706 'version': self.pkg.changes['version'], \
2707 'architecture': " ".join(self.pkg.changes['architecture'].keys())})
session.commit()