"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################
import errno
import os
import re
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap

import yaml

from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
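# A minimal usage sketch (illustrative only: `f` mirrors a files entry as
# built by parse_changes/build_file_list, and the session comes from DBConn):
#
#   session = DBConn().session()
#   f = {"type": "dsc", "package": "hello"}
#   file_type = get_type(f, session)    # -> "dsc"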
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
#        if f["type"] == "byhand":
#            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Anything that already has an override entry in a target suite is,
    # by definition, not NEW
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
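# Hedged example of how a caller (e.g. dak process-new) would use this;
# `u` stands for an Upload instance and is illustrative:
#
#   new = determine_new(u.pkg.changes, u.pkg.files, warn=0)
#   for pkg in new.keys():
#       print "%s: section=%s priority=%s files=%s" % \
#             (pkg, new[pkg]["section"], new[pkg]["priority"], new[pkg]["files"])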
################################################################################

def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
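# The -1 sentinel set above is what callers test; a sketch (assuming `new`
# came from determine_new()):
#
#   check_valid(new)
#   for pkg in new.keys():
#       if new[pkg]["section id"] == -1 or new[pkg]["priority id"] == -1:
#           print "%s: unknown or inconsistent section/priority" % pkg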
###############################################################################

def check_status(files):
    new = byhand = 0
    for f in files.keys():
        if files[f].has_key("byhand"):
            byhand = 1
        elif files[f].has_key("new"):
            new = 1
    return (new, byhand)

###############################################################################
# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
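# Sketch of how check_timestamps below drives this with python-apt's old
# extractor API (the .deb filename is illustrative):
#
#   tar = TarTime(time.time() + 86400, time.mktime(time.strptime("1975", "%Y")))
#   deb_file = utils.open_file("hello_2.4-3_i386.deb")
#   apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
#   print tar.future_files.keys(), tar.ancient_files.keys()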
###############################################################################

class Upload(object):
    """
    Everything that has to do with an upload being processed.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################
    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.pkg.reset()
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
    ###########################################################################
    def load_changes(self, filename):
        """
        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes.get("changed-by", ""), msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(filename)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
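    # Illustrative SuiteMappings entries as parsed by the loop above (the
    # real list lives in dak.conf; these are examples of the syntax only):
    #
    #   SuiteMappings
    #   {
    #     "map stable proposed-updates";
    #     "silent-map testing-security testing-proposed-updates";
    #     "propup-version stable-security testing testing-proposed-updates";
    #   };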
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
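    # Illustrative ComponentMappings entry as consumed above (example syntax
    # only; the live list is in dak.conf):
    #
    #   ComponentMappings
    #   {
    #     "non-US/main main";
    #   };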
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
               or (dbc.in_queue is not None
                   and dbc.in_queue.queue_name != 'unchecked'):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass

        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["new"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found although the Architecture line in the changes file mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
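    # re_changelog_versions matches the heading line of each changelog
    # stanza, e.g. (illustrative):
    #
    #   hello (2.4-3) unstable; urgency=low
    #
    # so "bts changelog" ends up holding one such line per version, which is
    # what gets written out for BTS Version Tracking.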
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        # or b) the orig files are MIA
        if not self.pkg.changes["architecture"].has_key("source") \
           or len(self.pkg.orig_files) == 0:
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            utils.warn("removing temporary source tree failed: %s" % (e))
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
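    # Example of the Format parsing above (sketch): a .changes carrying
    # "Format: 1.8" yields format == (1, 8), which is then compared against
    # the version each hash field first appeared in (utils.known_hashes).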
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()

    ###########################################################################
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list of the symlinks that were created is returned (so they can be
        removed again later).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked

    ###########################################################################
    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """

        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate messages
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
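    # Example cutoff computation (sketch): with
    # Dinstall::FutureTimeTravelGrace set to 86400 and
    # Dinstall::PastCutoffYear set to "1975", anything time stamped more
    # than a day ahead of now, or before 1975-01-01, is rejected above.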
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s.  Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source"):
                if fpr.source_acl.access_level is None:
                    rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                    rej += '\nPlease contact ftpmaster if you think this is incorrect'
                    self.rejects.append(rej)
                    return
            else:
                # If not a DM, we allow full upload rights
                uid_email = "%s@debian.org" % (fpr.uid.uid)
                self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)

        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm.  I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
    def check_dm_upload(self, fpr, session):
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existent) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe rather than end up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
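    # Illustrative shape of the ReleaseTransitions YAML parsed above (the
    # field names follow the t["..."] lookups; the values are made up):
    #
    #   apt:
    #     source: apt
    #     new: 0.7.21
    #     rm: "Some Release Team Member"
    #     reason: "Library transition for libapt"
    #     packages:
    #       - apt
    #       - synaptic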
    ###########################################################################
    # End check_signed_by_key checks
    ###########################################################################

    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
    ###########################################################################

    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: if False, no real action will be taken

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.

        """

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                self.update_subst()
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        summary += "\n"

        return summary
1730 ###########################################################################
1732 def announce(self, short_summary, action):
1734 Send an announce mail about a new upload.
1736 @type short_summary: string
1737 @param short_summary: Short summary text to include in the mail
1740 @param action: If set to false, no real action will be taken.
1743 @return: Text string describing the action taken.
1746 cnf = Config()
1748 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1750 # Only do announcements for source uploads with a recent dpkg-dev installed
1751 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1752 self.pkg.changes["architecture"].has_key("source"):
1753 return ""
1755 lists_done = {}
1756 summary = ""
1758 self.Subst["__SHORT_SUMMARY__"] = short_summary
1760 for dist in self.pkg.changes["distribution"].keys():
1761 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1762 if announce_list == "" or lists_done.has_key(announce_list):
1763 continue
1765 lists_done[announce_list] = 1
1766 summary += "Announcing to %s\n" % (announce_list)
1768 if action:
1770 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1771 if cnf.get("Dinstall::TrackingServer") and \
1772 self.pkg.changes["architecture"].has_key("source"):
1773 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1774 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1776 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1777 utils.send_mail(mail_message)
1779 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1781 if cnf.FindB("Dinstall::CloseBugs"):
1782 summary = self.close_bugs(summary, action)
1784 del self.Subst["__SHORT_SUMMARY__"]
1786 return summary
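# The announce targets come from per-suite configuration; a sketch of the
# relevant apt-style config stanzas (addresses illustrative):
#
#   Suite::unstable::Announce "debian-devel-changes@lists.debian.org";
#   Dinstall::TrackingServer "packages.qa.debian.org";
#
# With those values a sourceful upload is announced to the list and Bcc'ed
# to <source>@packages.qa.debian.org for package tracking.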
1788 ###########################################################################
1790 def accept (self, summary, short_summary, session=None):
1794 This moves all files referenced from the .changes into the pool,
1795 sends the accepted mail, announces to lists, closes bugs and
1796 also checks for override disparities. If enabled it will write out
1797 the version history for the BTS Version Tracking and will finally set
1798 up the per-suite copy queues (e.g. buildd queues).
1800 @type summary: string
1801 @param summary: Summary text
1803 @type short_summary: string
1804 @param short_summary: Short summary
1807 cnf = Config()
1808 stats = SummaryStats()
1810 if self.logger:
1811 self.logger.log(["installing changes", self.pkg.changes_file])
1813 poolfiles = []
1815 # Add the .dsc file to the DB first
1816 for newfile, entry in self.pkg.files.items():
1817 if entry["type"] == "dsc":
1818 dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1819 for j in pfs:
1820 poolfiles.append(j)
1822 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1823 for newfile, entry in self.pkg.files.items():
1824 if entry["type"] == "deb":
1825 poolfiles.append(add_deb_to_db(self, newfile, session))
1827 # If this is a sourceful diff only upload that is moving
1828 # cross-component we need to copy the .orig files into the new
1829 # component too for the same reasons as above.
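# utils.poolify() computes the pool subdirectory for a (source, component)
# pair; schematically, assuming the standard Debian pool layout:
#
#   utils.poolify("dak", "main")    -> "pool/main/d/dak/"
#   utils.poolify("libfoo", "main") -> "pool/main/libf/libfoo/"
#
# (lib* sources hash on their first four characters, everything else on the
# first one.)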
1830 if self.pkg.changes["architecture"].has_key("source"):
1831 for orig_file in self.pkg.orig_files.keys():
1832 if not self.pkg.orig_files[orig_file].has_key("id"):
1833 continue # Skip if it's not in the pool
1834 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1835 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1836 continue # Skip if the location didn't change
1839 oldf = get_poolfile_by_id(orig_file_id, session)
1840 old_filename = os.path.join(oldf.location.path, oldf.filename)
1841 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1842 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1844 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1846 # TODO: Care about size/md5sum collisions etc
1847 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1849 if newf is None:
1850 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1851 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1853 # TODO: Check that there's only 1 here
1854 source = get_sources_from_name(self.pkg.changes["source"], self.pkg.changes["version"])[0]
1855 dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
1856 dscf.poolfile_id = newf.file_id
1860 poolfiles.append(newf)
1862 # Install the files into the pool
1863 for newfile, entry in self.pkg.files.items():
1864 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1865 utils.move(newfile, destination)
1866 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1867 stats.accept_bytes += float(entry["size"])
1869 # Copy the .changes file across for suites which need it.
1870 copy_changes = {}
1871 for suite_name in self.pkg.changes["distribution"].keys():
1872 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1873 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1875 for dest in copy_changes.keys():
1876 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1878 # We're done - commit the database changes
1879 session.commit()
1880 # Our SQL session will automatically start a new transaction after
1881 # the last commit
1883 # Move the .changes into the 'done' directory
1884 utils.move(self.pkg.changes_file,
1885 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1887 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1888 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1890 # Send accept mail, announce to lists, close bugs and check for
1891 # override disparities
1892 if not cnf["Dinstall::Options::No-Mail"]:
1894 self.Subst["__SUITE__"] = ""
1895 self.Subst["__SUMMARY__"] = summary
1896 mail_message = utils.TemplateSubst(self.Subst,
1897 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1898 utils.send_mail(mail_message)
1899 self.announce(short_summary, 1)
1901 ## Helper stuff for DebBugs Version Tracking
1902 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1903 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1904 # the conditionalization on dsc["bts changelog"] should be
1905 # dropped.
1907 # Write out the version history from the changelog
1908 if self.pkg.changes["architecture"].has_key("source") and \
1909 self.pkg.dsc.has_key("bts changelog"):
1911 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1912 version_history = os.fdopen(fd, 'w')
1913 version_history.write(self.pkg.dsc["bts changelog"])
1914 version_history.close()
1915 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1916 self.pkg.changes_file[:-8]+".versions")
1917 os.rename(temp_filename, filename)
1918 os.chmod(filename, 0644)
1920 # Write out the binary -> source mapping.
1921 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1922 debinfo = os.fdopen(fd, 'w')
1923 for name, entry in sorted(self.pkg.files.items()):
1924 if entry["type"] == "deb":
1925 line = " ".join([entry["package"], entry["version"],
1926 entry["architecture"], entry["source package"],
1927 entry["source version"]])
1928 debinfo.write(line+"\n")
1929 debinfo.close()
1930 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1931 self.pkg.changes_file[:-8]+".debinfo")
1932 os.rename(temp_filename, filename)
1933 os.chmod(filename, 0644)
1937 # Set up our copy queues (e.g. buildd queues)
1938 for suite_name in self.pkg.changes["distribution"].keys():
1939 suite = get_suite(suite_name, session)
1940 for q in suite.copy_queues:
1941 for f in poolfiles:
1942 q.add_file_from_pool(f)
1944 session.commit()
1947 stats.accept_count += 1
1949 def check_override(self):
1951 Checks override entries for validity. Mails "Override disparity" warnings,
1952 if that feature is enabled.
1954 Abandons the check if
1955 - override disparity checks are disabled
1956 - mail sending is disabled
1961 # Abandon the check if:
1962 # a) override disparity checks have been disabled
1963 # b) we're not sending mail
1964 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1965 cnf["Dinstall::Options::No-Mail"]:
1966 return
1968 summary = self.pkg.check_override()
1969 if summary == "":
1970 return
1973 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1976 self.Subst["__SUMMARY__"] = summary
1977 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1978 utils.send_mail(mail_message)
1979 del self.Subst["__SUMMARY__"]
1981 ###########################################################################
1983 def remove(self, from_dir=None):
1985 Used (for instance) in p-u to remove the package from unchecked.
1987 Also removes the package from the holding area.
1989 if from_dir is None:
1990 from_dir = self.pkg.directory
1992 h = Holding()
1993 for f in self.pkg.files.keys():
1994 os.unlink(os.path.join(from_dir, f))
1995 if os.path.exists(os.path.join(h.holding_dir, f)):
1996 os.unlink(os.path.join(h.holding_dir, f))
1998 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
1999 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2000 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2002 ###########################################################################
2004 def move_to_queue (self, queue):
2006 Move files to a destination queue using the permissions in the table.
2008 h = Holding()
2009 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2010 queue.path, perms=int(queue.change_perms, 8))
2011 for f in self.pkg.files.keys():
2012 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2014 ###########################################################################
2016 def force_reject(self, reject_files):
2018 Forcefully move files from the current directory to the
2019 reject directory. If any file already exists in the reject
2020 directory it will be moved to the morgue to make way for
2021 the new file.
2023 @type reject_files: list
2024 @param reject_files: names of the files to reject
2028 cnf = Config()
2030 for file_entry in reject_files:
2031 # Skip any files which don't exist or which we don't have permission to copy.
2032 if os.access(file_entry, os.R_OK) == 0:
2033 continue
2035 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2037 try:
2038 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2039 except OSError, e:
2040 # File exists? Let's try and move it to the morgue
2041 if e.errno == errno.EEXIST:
2042 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2043 try:
2044 morgue_file = utils.find_next_free(morgue_file)
2045 except NoFreeFilenameError:
2046 # Something's either gone badly Pete Tong, or
2047 # someone is trying to exploit us.
2048 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2049 return
2050 utils.move(dest_file, morgue_file, perms=0660)
2051 try:
2052 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2053 except OSError, e:
2055 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2056 return
2057 else:
2058 raise
2059 # If we got here, we own the destination file, so we can
2060 # safely overwrite it.
2061 utils.move(file_entry, dest_file, 1, perms=0660)
2062 os.close(dest_fd)
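# The dance above is the usual O_EXCL claim pattern for taking ownership of
# a path that may already exist; a stripped-down sketch (hypothetical
# filename, morgue handling elided):
#
#   try:
#       fd = os.open("/path/to/reject/foo.changes",
#                    os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
#   except OSError, e:
#       if e.errno != errno.EEXIST:
#           raise
#       # someone beat us to it: evict the old file, then retry the claim
#
# Only the process whose os.open() succeeds owns the destination, so two
# concurrent rejects can never silently clobber each other.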
2064 ###########################################################################
2065 def do_reject (self, manual=0, reject_message="", note=""):
2067 Reject an upload. If called without a reject message or C{manual} is
2068 true, spawn an editor so the user can write one.
2071 @param manual: manual or automated rejection
2073 @type reject_message: string
2074 @param reject_message: A reject message
2079 # If we weren't given a manual rejection message, spawn an
2080 # editor so the user can add one in...
2081 if manual and not reject_message:
2082 (fd, temp_filename) = utils.temp_filename()
2083 temp_file = os.fdopen(fd, 'w')
2084 if note:
2085 for line in note:
2086 temp_file.write(line)
2087 temp_file.close()
2088 editor = os.environ.get("EDITOR","vi")
2089 answer = 'E'
2090 while answer == 'E':
2091 os.system("%s %s" % (editor, temp_filename))
2092 temp_fh = utils.open_file(temp_filename)
2093 reject_message = "".join(temp_fh.readlines())
2094 temp_fh.close()
2095 print "Reject message:"
2096 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2097 prompt = "[R]eject, Edit, Abandon, Quit ?"
2098 answer = "XXX"
2099 while prompt.find(answer) == -1:
2100 answer = utils.our_raw_input(prompt)
2101 m = re_default_answer.search(prompt)
2102 if answer == "":
2103 answer = m.group(1)
2104 answer = answer[:1].upper()
2105 os.unlink(temp_filename)
2106 if answer == 'A':
2107 return 1
2108 elif answer == 'Q':
2109 sys.exit(0)
2111 print "Rejecting.\n"
2113 cnf = Config()
2115 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2116 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2118 # Move all the files into the reject directory
2119 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2120 self.force_reject(reject_files)
2122 # If we fail here someone is probably trying to exploit the race
2123 # so let's just raise an exception ...
2124 if os.path.exists(reason_filename):
2125 os.unlink(reason_filename)
2126 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2128 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2130 if not manual:
2132 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2133 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2134 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2135 os.write(reason_fd, reject_message)
2136 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2137 else:
2138 # Build up the rejection email
2139 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2140 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2141 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2142 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2143 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2144 # Write the rejection email out as the <foo>.reason file
2145 os.write(reason_fd, reject_mail_message)
2147 del self.Subst["__REJECTOR_ADDRESS__"]
2148 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2149 del self.Subst["__CC__"]
2151 os.close(reason_fd)
2153 # Send the rejection mail if appropriate
2154 if not cnf["Dinstall::Options::No-Mail"]:
2155 utils.send_mail(reject_mail_message)
2157 if self.logger:
2158 self.logger.log(["rejected", self.pkg.changes_file])
2162 ################################################################################
2163 def in_override_p(self, package, component, suite, binary_type, filename, session):
2165 Check if a package already has override entries in the DB
2167 @type package: string
2168 @param package: package name
2170 @type component: string
2171 @param component: component name
2173 @type suite: string
2174 @param suite: suite name
2176 @type binary_type: string
2177 @param binary_type: type of the package
2179 @type filename: string
2180 @param filename: filename we check
2182 @return: the database result. But no one cares anyway.
2185 cnf = Config()
2188 if binary_type == "": # must be source
2189 file_type = "dsc"
2190 else:
2191 file_type = binary_type
2193 # Override suite name; used for example with proposed-updates
2194 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2195 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2197 result = get_override(package, suite, component, file_type, session)
2199 # If checking for a source package fall back on the binary override type
2200 if file_type == "dsc" and len(result) < 1:
2201 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2203 # Remember the section and priority so we can check them later if appropriate
2204 if len(result) > 0:
2205 result = result[0]
2206 self.pkg.files[filename]["override section"] = result.section.section
2207 self.pkg.files[filename]["override priority"] = result.priority.priority
2208 return result
2210 return None
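# The OverrideSuite indirection above lets a queue suite borrow the override
# data of the suite it feeds; a sketch of the config (value illustrative):
#
#   Suite::proposed-updates::OverrideSuite "stable";
#
# i.e. override lookups for proposed-updates are answered from stable.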
2212 ################################################################################
2213 def get_anyversion(self, sv_list, suite):
2216 @param sv_list: list of (suite, version) tuples to check
2219 @param suite: suite name
2222 Cnf = Config()
2223 anyversion = None
2225 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2226 for (s, v) in sv_list:
2227 if s in [ x.lower() for x in anysuite ]:
2228 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2229 anyversion = v
2231 return anyversion
2233 ################################################################################
2235 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2238 @param sv_list: list of (suite, version) tuples to check
2240 @type filename: string
2241 @param filename: name of the file whose version is being checked
2243 @type new_version: string
2244 @param new_version: version of the package in this upload
2246 Ensure versions are newer than existing packages in target
2247 suites and that cross-suite version checking rules as
2248 set out in the conf file are satisfied.
2251 cnf = Config()
2253 # Check versions for each target suite
2254 for target_suite in self.pkg.changes["distribution"].keys():
2255 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2256 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2258 # Enforce "must be newer than target suite" even if conffile omits it
2259 if target_suite not in must_be_newer_than:
2260 must_be_newer_than.append(target_suite)
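# A sketch of the per-suite version-check configuration read above (suite
# names illustrative):
#
#   Suite::unstable::VersionChecks::MustBeNewerThan { Stable; Testing; };
#   Suite::testing-proposed-updates::VersionChecks::MustBeOlderThan { Unstable; };
#
# Note that apt_pkg.VersionCompare(a, b) returns <0, 0 or >0 for a older
# than, equal to or newer than b, so "vercmp < 1" below means "not strictly
# newer" and "vercmp > -1" means "not strictly older".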
2262 for (suite, existent_version) in sv_list:
2263 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2265 if suite in must_be_newer_than and sourceful and vercmp < 1:
2266 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2268 if suite in must_be_older_than and vercmp > -1:
2269 cansave = 0
2271 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2272 # we really use the other suite, ignoring the conflicting one ...
2273 addsuite = self.pkg.changes["distribution-version"][suite]
2275 add_version = self.get_anyversion(sv_list, addsuite)
2276 target_version = self.get_anyversion(sv_list, target_suite)
2278 if not add_version:
2279 # not add_version can only happen if we map to a suite
2280 # that doesn't enhance the suite we're propup'ing from.
2281 # so "propup-ver x a b c; map a d" is a problem only if
2282 # d doesn't enhance a.
2284 # i think we could always propagate in this case, rather
2285 # than complaining. either way, this isn't a REJECT issue
2287 # And - we really should complain to the dorks who configured dak
2288 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2289 self.pkg.changes.setdefault("propdistribution", {})
2290 self.pkg.changes["propdistribution"][addsuite] = 1
2291 cansave = 1
2292 elif not target_version:
2293 # not target_version is true when the package is NEW
2294 # we could just stick with the "...old version..." REJECT
2295 # for this, I think.
2296 self.rejects.append("Won't propogate NEW packages.")
2297 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2298 # propagation would be redundant. no need to reject though.
2299 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2300 cansave = 1
2301 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2302 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2304 self.warnings.append("Propagating upload to %s" % (addsuite))
2305 self.pkg.changes.setdefault("propdistribution", {})
2306 self.pkg.changes["propdistribution"][addsuite] = 1
2307 cansave = 1
2309 if not cansave:
2310 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2312 ################################################################################
2313 def check_binary_against_db(self, filename, session):
2314 # Ensure version is sane
2315 q = session.query(BinAssociation)
2316 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2317 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2319 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2320 filename, self.pkg.files[filename]["version"], sourceful=False)
2322 # Check for any existing copies of the file
2323 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2324 q = q.filter_by(version=self.pkg.files[filename]["version"])
2325 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2327 if q.count() > 0:
2328 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2330 ################################################################################
2332 def check_source_against_db(self, filename, session):
2333 source = self.pkg.dsc.get("source")
2334 version = self.pkg.dsc.get("version")
2336 # Ensure version is sane
2337 q = session.query(SrcAssociation)
2338 q = q.join(DBSource).filter(DBSource.source==source)
2340 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2341 filename, version, sourceful=True)
2343 ################################################################################
2344 def check_dsc_against_db(self, filename, session):
2347 @warning: NB: this function can remove entries from the 'files' index [if
2348 the orig tarball is a duplicate of the one in the archive]; if
2349 you're iterating over 'files' and call this function as part of
2350 the loop, be sure to add a check to the top of the loop to
2351 ensure you haven't just tried to dereference the deleted entry.
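# A minimal sketch of the iteration pattern that warning asks for (this is
# what recheck() below does):
#
#   for f in self.pkg.files.keys():
#       if not self.pkg.files.has_key(f):
#           continue    # entry was removed while checking the .dsc
#       ...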
2356 self.pkg.orig_files = {} # XXX: do we need to clear it?
2357 orig_files = self.pkg.orig_files
2359 # Try and find all files mentioned in the .dsc. This has
2360 # to work harder to cope with the multiple possible
2361 # locations of an .orig.tar.gz.
2362 # The ordering on the select is needed to pick the newest orig
2363 # when it exists in multiple places.
2364 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2365 found = None
2366 if self.pkg.files.has_key(dsc_name):
2367 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2368 actual_size = int(self.pkg.files[dsc_name]["size"])
2369 found = "%s in incoming" % (dsc_name)
2371 # Check the file does not already exist in the archive
2372 ql = get_poolfile_like_name(dsc_name, session)
2374 # Strip out anything that isn't '%s' or '/%s$'
2375 for i in ql:
2376 if not i.filename.endswith(dsc_name):
2377 ql.remove(i)
2379 # "[dak] has not broken them. [dak] has fixed a
2380 # brokenness. Your crappy hack exploited a bug in
2383 # "(Come on! I thought it was always obvious that
2384 # one just doesn't release different files with
2385 # the same name and version.)"
2386 # -- ajk@ on d-devel@l.d.o
2387 if len(ql) > 0:
2389 # Ignore exact matches for .orig.tar.gz
2390 match = 0
2391 if re_is_orig_source.match(dsc_name):
2392 for i in ql:
2393 if self.pkg.files.has_key(dsc_name) and \
2394 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2395 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2396 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2397 # TODO: Don't delete the entry, just mark it as not needed
2398 # This would fix the stupidity of changing something we often iterate over
2399 # whilst we're doing it
2400 del self.pkg.files[dsc_name]
2401 dsc_entry["files id"] = i.file_id
2402 if not orig_files.has_key(dsc_name):
2403 orig_files[dsc_name] = {}
2404 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2408 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2410 elif re_is_orig_source.match(dsc_name):
2412 ql = get_poolfile_like_name(dsc_name, session)
2414 # Strip out anything that isn't '%s' or '/%s$'
2415 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2416 for i in ql:
2417 if not i.filename.endswith(dsc_name):
2418 ql.remove(i)
2420 if len(ql) > 0:
2421 # Unfortunately, we may get more than one match here if,
2422 # for example, the package was in potato but had an -sa
2423 # upload in woody. So we need to choose the right one.
2425 # default to something sane in case we don't match any or have only one
2426 x = ql[0]
2428 if len(ql) > 1:
2429 for i in ql:
2430 old_file = os.path.join(i.location.path, i.filename)
2431 old_file_fh = utils.open_file(old_file)
2432 actual_md5 = apt_pkg.md5sum(old_file_fh)
2433 old_file_fh.close()
2434 actual_size = os.stat(old_file)[stat.ST_SIZE]
2435 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2436 x = i
2438 old_file = os.path.join(x.location.path, x.filename)
2439 old_file_fh = utils.open_file(old_file)
2440 actual_md5 = apt_pkg.md5sum(old_file_fh)
2441 old_file_fh.close()
2442 actual_size = os.stat(old_file)[stat.ST_SIZE]
2443 found = old_file
2444 suite_type = x.location.archive_type
2445 # need this for updating dsc_files in install()
2446 dsc_entry["files id"] = x.file_id
2447 # See install() in process-accepted...
2448 if not orig_files.has_key(dsc_name):
2449 orig_files[dsc_name] = {}
2450 orig_files[dsc_name]["id"] = x.file_id
2451 orig_files[dsc_name]["path"] = old_file
2452 orig_files[dsc_name]["location"] = x.location.location_id
2454 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2455 # Not there? Check the queue directories...
2456 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2457 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2458 continue
2459 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2460 if os.path.exists(in_otherdir):
2461 in_otherdir_fh = utils.open_file(in_otherdir)
2462 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2463 in_otherdir_fh.close()
2464 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2465 found = in_otherdir
2466 if not orig_files.has_key(dsc_name):
2467 orig_files[dsc_name] = {}
2468 orig_files[dsc_name]["path"] = in_otherdir
2470 if not found:
2471 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2472 continue
2473 else:
2474 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2475 continue
2476 if actual_md5 != dsc_entry["md5sum"]:
2477 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2478 if actual_size != int(dsc_entry["size"]):
2479 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2481 ################################################################################
2482 # This is used by process-new and process-holding to recheck a changes file
2483 # at the time we're running. It mainly wraps various other internal functions
2484 # and is similar to accepted_checks - these should probably be tidied up
2486 def recheck(self, session):
2487 cnf = Config()
2488 for f in self.pkg.files.keys():
2489 # The .orig.tar.gz can disappear out from under us if it's a
2490 # duplicate of one in the archive.
2491 if not self.pkg.files.has_key(f):
2492 continue
2494 entry = self.pkg.files[f]
2496 # Check that the source still exists
2497 if entry["type"] == "deb":
2498 source_version = entry["source version"]
2499 source_package = entry["source package"]
2500 if not self.pkg.changes["architecture"].has_key("source") \
2501 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2502 source_epochless_version = re_no_epoch.sub('', source_version)
2503 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2504 found = False
2505 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2506 if cnf.has_key("Dir::Queue::%s" % (q)):
2507 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2508 found = True
2509 if not found:
2510 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2512 # Version and file overwrite checks
2513 if entry["type"] == "deb":
2514 self.check_binary_against_db(f, session)
2515 elif entry["type"] == "dsc":
2516 self.check_source_against_db(f, session)
2517 self.check_dsc_against_db(f, session)
2519 ################################################################################
2520 def accepted_checks(self, overwrite_checks, session):
2521 # Recheck anything that relies on the database; since that's not
2522 # frozen between accept and our run time when called from p-a.
2524 # overwrite_checks is set to False when installing to stable/oldstable
2526 propogate = {}
2527 nopropogate = {}
2529 # Find the .dsc (again)
2530 dsc_filename = None
2531 for f in self.pkg.files.keys():
2532 if self.pkg.files[f]["type"] == "dsc":
2533 dsc_filename = f
2535 for checkfile in self.pkg.files.keys():
2536 # The .orig.tar.gz can disappear out from under us if it's a
2537 # duplicate of one in the archive.
2538 if not self.pkg.files.has_key(checkfile):
2539 continue
2541 entry = self.pkg.files[checkfile]
2543 # Check that the source still exists
2544 if entry["type"] == "deb":
2545 source_version = entry["source version"]
2546 source_package = entry["source package"]
2547 if not self.pkg.changes["architecture"].has_key("source") \
2548 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2549 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2551 # Version and file overwrite checks
2552 if overwrite_checks:
2553 if entry["type"] == "deb":
2554 self.check_binary_against_db(checkfile, session)
2555 elif entry["type"] == "dsc":
2556 self.check_source_against_db(checkfile, session)
2557 self.check_dsc_against_db(dsc_filename, session)
2559 # propagate in the case it is in the override tables:
2560 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2561 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2562 propogate[suite] = 1
2563 else:
2564 nopropogate[suite] = 1
2566 for suite in propogate.keys():
2567 if suite in nopropogate:
2568 continue
2569 self.pkg.changes["distribution"][suite] = 1
2571 for checkfile in self.pkg.files.keys():
2572 # Check the package is still in the override tables
entry = self.pkg.files[checkfile]
2573 for suite in self.pkg.changes["distribution"].keys():
2574 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2575 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2577 ################################################################################
2578 # This is not really a reject, but an unaccept, but since a) the code for
2579 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2580 # extremely rare, for now we'll go with whining at our admin folks...
2582 def do_unaccept(self):
2584 cnf = Config()
2586 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2587 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2588 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2589 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2590 if cnf.has_key("Dinstall::Bcc"):
2591 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2593 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2595 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2597 # Write the rejection email out as the <foo>.reason file
2598 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2599 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2601 # If we fail here someone is probably trying to exploit the race
2602 # so let's just raise an exception ...
2603 if os.path.exists(reject_filename):
2604 os.unlink(reject_filename)
2606 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2607 os.write(fd, reject_mail_message)
2608 os.close(fd)
2610 utils.send_mail(reject_mail_message)
2612 del self.Subst["__REJECTOR_ADDRESS__"]
2613 del self.Subst["__REJECT_MESSAGE__"]
2614 del self.Subst["__CC__"]
2616 ################################################################################
2617 # If any file of an upload has a recent mtime then chances are good
2618 # the file is still being uploaded.
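# The threshold comes from the configuration; a sketch (value illustrative):
#
#   Dinstall::SkipTime 300;
#
# i.e. any file modified less than five minutes ago marks the whole upload
# as still in progress, and it is skipped for this run.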
2620 def upload_too_new(self):
2621 cnf = Config()
2622 too_new = False
2623 # Move back to the original directory to get accurate time stamps
2624 cwd = os.getcwd()
2625 os.chdir(self.pkg.directory)
2626 file_list = self.pkg.files.keys()
2627 file_list.extend(self.pkg.dsc_files.keys())
2628 file_list.append(self.pkg.changes_file)
2629 for f in file_list:
2631 last_modified = time.time()-os.path.getmtime(f)
2632 if last_modified < int(cnf["Dinstall::SkipTime"]):