5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
81 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
83 # Validate the override type
84 type_id = get_override_type(file_type, session)
86 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
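# Illustrative sketch only (never called): feeding get_type() a file entry as
# built elsewhere in this module.  The entry dict below is a made-up minimal
# example; real entries come from Upload.pkg.files.
def _example_get_type_usage():
    session = DBConn().session()
    entry = {"type": "deb", "dbtype": "deb"}
    file_type = get_type(entry, session)   # "deb", validated against the override types
    session.close()
    return file_type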
90 ################################################################################
92 # Determine what parts in a .changes are NEW
94 def determine_new(changes, files, warn=1):
96 Determine what parts in a C{changes} file are NEW.
98 @type changes: Upload.Pkg.changes dict
99 @param changes: Changes dictionary
101 @type files: Upload.Pkg.files dict
102 @param files: Files dictionary
105 @param warn: Warn if overrides are added for (old)stable
108 @return: dictionary of NEW components.
113 session = DBConn().session()
115 # Build up a list of potentially new things
116 for name, f in files.items():
117 # Skip byhand elements
118 # if f["type"] == "byhand":
121 priority = f["priority"]
122 section = f["section"]
123 file_type = get_type(f, session)
124 component = f["component"]
126 if file_type == "dsc":
129 if not new.has_key(pkg):
131 new[pkg]["priority"] = priority
132 new[pkg]["section"] = section
133 new[pkg]["type"] = file_type
134 new[pkg]["component"] = component
135 new[pkg]["files"] = []
137 old_type = new[pkg]["type"]
138 if old_type != file_type:
139 # source gets trumped by deb or udeb
140 if old_type == "dsc":
141 new[pkg]["priority"] = priority
142 new[pkg]["section"] = section
143 new[pkg]["type"] = file_type
144 new[pkg]["component"] = component
146 new[pkg]["files"].append(name)
148 if f.has_key("othercomponents"):
149 new[pkg]["othercomponents"] = f["othercomponents"]
151 for suite in changes["suite"].keys():
152 for pkg in new.keys():
153 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
155 for file_entry in new[pkg]["files"]:
156 if files[file_entry].has_key("new"):
157 del files[file_entry]["new"]
161 for s in ['stable', 'oldstable']:
162 if changes["suite"].has_key(s):
163 print "WARNING: overrides will be added for %s!" % s
164 for pkg in new.keys():
165 if new[pkg].has_key("othercomponents"):
166 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
172 ################################################################################
174 def check_valid(new):
176 Check if section and priority for NEW packages exist in database.
177 Additionally does sanity checks:
178 - debian-installer packages have to be udeb (or source)
179 - non-debian-installer packages cannot be udeb
180 - source priority can only be assigned to dsc file types
183 @param new: Dict of new packages with their section, priority and type.
186 for pkg in new.keys():
187 section_name = new[pkg]["section"]
188 priority_name = new[pkg]["priority"]
189 file_type = new[pkg]["type"]
191 section = get_section(section_name)
193 new[pkg]["section id"] = -1
195 new[pkg]["section id"] = section.section_id
197 priority = get_priority(priority_name)
199 new[pkg]["priority id"] = -1
201 new[pkg]["priority id"] = priority.priority_id
204 di = section_name.find("debian-installer") != -1
206 # If d-i, we must be udeb and vice-versa
207 if (di and file_type not in ("udeb", "dsc")) or \
208 (not di and file_type == "udeb"):
209 new[pkg]["section id"] = -1
211 # If dsc we need to be source and vice-versa
212 if (priority_name == "source" and file_type != "dsc") or \
213 (priority_name != "source" and file_type == "dsc"):
214 new[pkg]["priority id"] = -1
216 ###############################################################################
218 def check_status(files):
220 for f in files.keys():
221 if files[f].has_key("byhand"):
223 elif files[f].has_key("new"):
227 ###############################################################################
229 # Used by Upload.check_timestamps
230 class TarTime(object):
231 def __init__(self, future_cutoff, past_cutoff):
233 self.future_cutoff = future_cutoff
234 self.past_cutoff = past_cutoff
237 self.future_files = {}
238 self.ancient_files = {}
240 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
241 if MTime > self.future_cutoff:
242 self.future_files[Name] = MTime
243 if MTime < self.past_cutoff:
244 self.ancient_files[Name] = MTime
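# Illustrative sketch only (never called): feeding TarTime's callback through
# apt_inst, as Upload.check_timestamps() does further down.  The cut-offs here
# are arbitrary example values.
def _example_tartime_usage(deb_filename):
    tar = TarTime(time.time() + 24 * 3600,
                  time.mktime(time.strptime("1975", "%Y")))
    deb_file = utils.open_file(deb_filename)
    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
    deb_file.seek(0)
    apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
    deb_file.close()
    return (tar.future_files, tar.ancient_files)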
246 ###############################################################################
248 class Upload(object):
250 Everything that has to do with processing an upload.
258 ###########################################################################
261 """ Reset a number of internal variables."""
263 # Initialize the substitution template map
266 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
267 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
268 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
269 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
277 def package_info(self):
279 Format various messages from this Upload to send to the maintainer.
283 ('Reject Reasons', self.rejects),
284 ('Warnings', self.warnings),
285 ('Notes', self.notes),
289 for title, messages in msgs:
291 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
296 ###########################################################################
297 def update_subst(self):
298 """ Set up the per-package template substitution mappings """
302 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
303 if not self.pkg.changes.has_key("architecture") or not \
304 isinstance(self.pkg.changes["architecture"], dict):
305 self.pkg.changes["architecture"] = { "Unknown" : "" }
307 # and maintainer2047 may not exist.
308 if not self.pkg.changes.has_key("maintainer2047"):
309 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
311 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
312 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
313 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
315 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
316 if self.pkg.changes["architecture"].has_key("source") and \
317 self.pkg.changes["changedby822"] != "" and \
318 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
320 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
321 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
322 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
324 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
325 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
326 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
328 if "sponsoremail" in self.pkg.changes:
329 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
331 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
332 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
334 # Apply any global override of the Maintainer field
335 if cnf.get("Dinstall::OverrideMaintainer"):
336 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
337 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
339 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
340 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
341 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
343 ###########################################################################
344 def load_changes(self, filename):
347 @return: whether the changes file was valid or not. We may want to
348 reject even if this is True (see what gets put in self.rejects).
349 This is simply to prevent us even trying things later which will
350 fail because we couldn't properly parse the file.
353 self.pkg.changes_file = filename
355 # Parse the .changes file into a dictionary
357 self.pkg.changes.update(parse_changes(filename))
358 except CantOpenError:
359 self.rejects.append("%s: can't read file." % (filename))
361 except ParseChangesError, line:
362 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
364 except ChangesUnicodeError:
365 self.rejects.append("%s: changes file not proper utf-8" % (filename))
368 # Parse the Files field from the .changes into another dictionary
370 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
371 except ParseChangesError, line:
372 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
374 except UnknownFormatError, format:
375 self.rejects.append("%s: unknown format '%s'." % (filename, format))
378 # Check for mandatory fields
379 for i in ("distribution", "source", "binary", "architecture",
380 "version", "maintainer", "files", "changes", "description"):
381 if not self.pkg.changes.has_key(i):
382 # Avoid undefined errors later
383 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
386 # Strip a source version in brackets from the source field
387 if re_strip_srcver.search(self.pkg.changes["source"]):
388 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
390 # Ensure the source field is a valid package name.
391 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
392 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
394 # Split multi-value fields into a lower-level dictionary
395 for i in ("architecture", "distribution", "binary", "closes"):
396 o = self.pkg.changes.get(i, "")
398 del self.pkg.changes[i]
400 self.pkg.changes[i] = {}
403 self.pkg.changes[i][j] = 1
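# Illustrative note: this split turns e.g. "Architecture: source amd64" into
# self.pkg.changes["architecture"] == {"source": 1, "amd64": 1}, which is why
# later checks use has_key("source") rather than substring matching.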
405 # Fix the Maintainer: field to be RFC822/2047 compatible
407 (self.pkg.changes["maintainer822"],
408 self.pkg.changes["maintainer2047"],
409 self.pkg.changes["maintainername"],
410 self.pkg.changes["maintaineremail"]) = \
411 fix_maintainer (self.pkg.changes["maintainer"])
412 except ParseMaintError, msg:
413 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
414 % (filename, self.pkg.changes["maintainer"], msg))
416 # ...likewise for the Changed-By: field if it exists.
418 (self.pkg.changes["changedby822"],
419 self.pkg.changes["changedby2047"],
420 self.pkg.changes["changedbyname"],
421 self.pkg.changes["changedbyemail"]) = \
422 fix_maintainer (self.pkg.changes.get("changed-by", ""))
423 except ParseMaintError, msg:
424 self.pkg.changes["changedby822"] = ""
425 self.pkg.changes["changedby2047"] = ""
426 self.pkg.changes["changedbyname"] = ""
427 self.pkg.changes["changedbyemail"] = ""
429 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
430 % (filename, self.pkg.changes["changed-by"], msg))
432 # Ensure all the values in Closes: are numbers
433 if self.pkg.changes.has_key("closes"):
434 for i in self.pkg.changes["closes"].keys():
435 if re_isanum.match (i) == None:
436 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
438 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
439 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
440 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
442 # Check the .changes is non-empty
443 if not self.pkg.files:
444 self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
447 # Changes was syntactically valid even if we'll reject
450 ###########################################################################
452 def check_distributions(self):
453 "Check and map the Distribution field"
457 # Handle suite mappings
458 for m in Cnf.ValueList("SuiteMappings"):
461 if mtype == "map" or mtype == "silent-map":
462 (source, dest) = args[1:3]
463 if self.pkg.changes["distribution"].has_key(source):
464 del self.pkg.changes["distribution"][source]
465 self.pkg.changes["distribution"][dest] = 1
466 if mtype != "silent-map":
467 self.notes.append("Mapping %s to %s." % (source, dest))
468 if self.pkg.changes.has_key("distribution-version"):
469 if self.pkg.changes["distribution-version"].has_key(source):
470 self.pkg.changes["distribution-version"][source]=dest
471 elif mtype == "map-unreleased":
472 (source, dest) = args[1:3]
473 if self.pkg.changes["distribution"].has_key(source):
474 for arch in self.pkg.changes["architecture"].keys():
475 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
476 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
477 del self.pkg.changes["distribution"][source]
478 self.pkg.changes["distribution"][dest] = 1
480 elif mtype == "ignore":
482 if self.pkg.changes["distribution"].has_key(suite):
483 del self.pkg.changes["distribution"][suite]
484 self.warnings.append("Ignoring %s as a target suite." % (suite))
485 elif mtype == "reject":
487 if self.pkg.changes["distribution"].has_key(suite):
488 self.rejects.append("Uploads to %s are not accepted." % (suite))
489 elif mtype == "propup-version":
490 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
492 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
493 if self.pkg.changes["distribution"].has_key(args[1]):
494 self.pkg.changes.setdefault("distribution-version", {})
495 for suite in args[2:]:
496 self.pkg.changes["distribution-version"][suite] = suite
498 # Ensure there is (still) a target distribution
499 if len(self.pkg.changes["distribution"].keys()) < 1:
500 self.rejects.append("No valid distribution remaining.")
502 # Ensure target distributions exist
503 for suite in self.pkg.changes["distribution"].keys():
504 if not Cnf.has_key("Suite::%s" % (suite)):
505 self.rejects.append("Unknown distribution `%s'." % (suite))
507 ###########################################################################
509 def binary_file_checks(self, f, session):
511 entry = self.pkg.files[f]
513 # Extract package control information
514 deb_file = utils.open_file(f)
516 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
518 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
520 # Can't continue, none of the checks on control would work.
523 # Check for mandatory "Description:"
526 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
528 self.rejects.append("%s: Missing Description in binary package" % (f))
533 # Check for mandatory fields
534 for field in [ "Package", "Architecture", "Version" ]:
535 if control.Find(field) == None:
537 self.rejects.append("%s: No %s field in control." % (f, field))
540 # Ensure the package name matches the one given in the .changes
541 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
542 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
544 # Validate the package field
545 package = control.Find("Package")
546 if not re_valid_pkg_name.match(package):
547 self.rejects.append("%s: invalid package name '%s'." % (f, package))
549 # Validate the version field
550 version = control.Find("Version")
551 if not re_valid_version.match(version):
552 self.rejects.append("%s: invalid version number '%s'." % (f, version))
554 # Ensure the architecture of the .deb is one we know about.
555 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
556 architecture = control.Find("Architecture")
557 upload_suite = self.pkg.changes["distribution"].keys()[0]
559 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
560 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
561 self.rejects.append("Unknown architecture '%s'." % (architecture))
563 # Ensure the architecture of the .deb is one of the ones
564 # listed in the .changes.
565 if not self.pkg.changes["architecture"].has_key(architecture):
566 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
568 # Sanity-check the Depends field
569 depends = control.Find("Depends")
571 self.rejects.append("%s: Depends field is empty." % (f))
573 # Sanity-check the Provides field
574 provides = control.Find("Provides")
576 provide = re_spacestrip.sub('', provides)
578 self.rejects.append("%s: Provides field is empty." % (f))
579 prov_list = provide.split(",")
580 for prov in prov_list:
581 if not re_valid_pkg_name.match(prov):
582 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
584 # Check the section & priority match those given in the .changes (non-fatal)
585 if control.Find("Section") and entry["section"] != "" \
586 and entry["section"] != control.Find("Section"):
587 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
588 (f, control.Find("Section", ""), entry["section"]))
589 if control.Find("Priority") and entry["priority"] != "" \
590 and entry["priority"] != control.Find("Priority"):
591 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
592 (f, control.Find("Priority", ""), entry["priority"]))
594 entry["package"] = package
595 entry["architecture"] = architecture
596 entry["version"] = version
597 entry["maintainer"] = control.Find("Maintainer", "")
599 if f.endswith(".udeb"):
600 self.pkg.files[f]["dbtype"] = "udeb"
601 elif f.endswith(".deb"):
602 self.pkg.files[f]["dbtype"] = "deb"
604 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
606 entry["source"] = control.Find("Source", entry["package"])
608 # Get the source version
609 source = entry["source"]
612 if source.find("(") != -1:
613 m = re_extract_src_version.match(source)
615 source_version = m.group(2)
617 if not source_version:
618 source_version = self.pkg.files[f]["version"]
620 entry["source package"] = source
621 entry["source version"] = source_version
623 # Ensure the filename matches the contents of the .deb
624 m = re_isadeb.match(f)
627 file_package = m.group(1)
628 if entry["package"] != file_package:
629 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
630 (f, file_package, entry["dbtype"], entry["package"]))
631 epochless_version = re_no_epoch.sub('', control.Find("Version"))
634 file_version = m.group(2)
635 if epochless_version != file_version:
636 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
637 (f, file_version, entry["dbtype"], epochless_version))
640 file_architecture = m.group(3)
641 if entry["architecture"] != file_architecture:
642 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
643 (f, file_architecture, entry["dbtype"], entry["architecture"]))
645 # Check for existing source
646 source_version = entry["source version"]
647 source_package = entry["source package"]
648 if self.pkg.changes["architecture"].has_key("source"):
649 if source_version != self.pkg.changes["version"]:
650 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
651 (source_version, f, self.pkg.changes["version"]))
653 # Check in the SQL database
654 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
655 # Check in one of the other directories
656 source_epochless_version = re_no_epoch.sub('', source_version)
657 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
658 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
660 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
663 dsc_file_exists = False
664 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
665 if cnf.has_key("Dir::Queue::%s" % (myq)):
666 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
667 dsc_file_exists = True
670 if not dsc_file_exists:
671 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
673 # Check the version and for file overwrites
674 self.check_binary_against_db(f, session)
676 # Temporarily disable contents generation until we change the table storage layout
679 #if len(b.rejects) > 0:
680 # for j in b.rejects:
681 # self.rejects.append(j)
683 def source_file_checks(self, f, session):
684 entry = self.pkg.files[f]
686 m = re_issource.match(f)
690 entry["package"] = m.group(1)
691 entry["version"] = m.group(2)
692 entry["type"] = m.group(3)
694 # Ensure the source package name matches the Source field in the .changes
695 if self.pkg.changes["source"] != entry["package"]:
696 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
698 # Ensure the source version matches the version in the .changes file
699 if re_is_orig_source.match(f):
700 changes_version = self.pkg.changes["chopversion2"]
702 changes_version = self.pkg.changes["chopversion"]
704 if changes_version != entry["version"]:
705 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
707 # Ensure the .changes lists source in the Architecture field
708 if not self.pkg.changes["architecture"].has_key("source"):
709 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
711 # Check the signature of a .dsc file
712 if entry["type"] == "dsc":
713 # check_signature returns either:
714 # (None, [list, of, rejects]) or (signature, [])
715 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
717 self.rejects.append(j)
719 entry["architecture"] = "source"
721 def per_suite_file_checks(self, f, suite, session):
723 entry = self.pkg.files[f]
726 if entry.has_key("byhand"):
729 # Check we have fields we need to do these checks
731 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
732 if not entry.has_key(m):
733 self.rejects.append("file '%s' does not have field %s set" % (f, m))
739 # Handle component mappings
740 for m in cnf.ValueList("ComponentMappings"):
741 (source, dest) = m.split()
742 if entry["component"] == source:
743 entry["original component"] = source
744 entry["component"] = dest
746 # Ensure the component is valid for the target suite
747 if cnf.has_key("Suite:%s::Components" % (suite)) and \
748 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
749 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
752 # Validate the component
753 if not get_component(entry["component"], session):
754 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
757 # See if the package is NEW
758 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
761 # Validate the priority
762 if entry["priority"].find('/') != -1:
763 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
765 # Determine the location
766 location = cnf["Dir::Pool"]
767 l = get_location(location, entry["component"], session=session)
769 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %)" % entry["component"])
770 entry["location id"] = -1
772 entry["location id"] = l.location_id
774 # Check the md5sum & size against existing files (if any)
775 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
777 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
778 entry["size"], entry["md5sum"], entry["location id"])
781 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
782 elif found is False and poolfile is not None:
783 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
786 entry["files id"] = None
788 entry["files id"] = poolfile.file_id
790 # Check for packages that have moved from one component to another
791 entry['suite'] = suite
792 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
794 entry["othercomponents"] = res.fetchone()[0]
796 def check_files(self, action=True):
797 file_keys = self.pkg.files.keys()
803 os.chdir(self.pkg.directory)
805 ret = holding.copy_to_holding(f)
807 # XXX: Should we bail out here or try and continue?
808 self.rejects.append(ret)
812 # Check whether we already know the changes file
813 # [NB: this check must be done post-suite mapping]
814 base_filename = os.path.basename(self.pkg.changes_file)
816 session = DBConn().session()
819 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
820 # if in the pool or in a queue other than unchecked, reject
821 if (dbc.in_queue is None) \
822 or (dbc.in_queue is not None
823 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
824 self.rejects.append("%s file already known to dak" % base_filename)
825 except NoResultFound, e:
832 for f, entry in self.pkg.files.items():
833 # Ensure the file does not already exist in one of the accepted directories
834 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
835 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
836 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
837 self.rejects.append("%s file already exists in the %s directory." % (f, d))
839 if not re_taint_free.match(f):
840 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
842 # Check the file is readable
843 if os.access(f, os.R_OK) == 0:
844 # When running in -n, copy_to_holding() won't have
845 # generated the reject_message, so we need to.
847 if os.path.exists(f):
848 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
850 self.rejects.append("Can't read `%s'. [file not found]" % (f))
851 entry["type"] = "unreadable"
854 # If it's byhand skip remaining checks
855 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
857 entry["type"] = "byhand"
859 # Checks for a binary package...
860 elif re_isadeb.match(f):
862 entry["type"] = "deb"
864 # This routine appends to self.rejects/warnings as appropriate
865 self.binary_file_checks(f, session)
867 # Checks for a source package...
868 elif re_issource.match(f):
871 # This routine appends to self.rejects/warnings as appropriate
872 self.source_file_checks(f, session)
874 # Not a binary or source package? Assume byhand...
877 entry["type"] = "byhand"
879 # Per-suite file checks
880 entry["oldfiles"] = {}
881 for suite in self.pkg.changes["distribution"].keys():
882 self.per_suite_file_checks(f, suite, session)
886 # If the .changes file says it has source, it must have source.
887 if self.pkg.changes["architecture"].has_key("source"):
889 self.rejects.append("no source found and Architecture line in changes mention source.")
891 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
892 self.rejects.append("source only uploads are not supported.")
894 ###########################################################################
895 def check_dsc(self, action=True, session=None):
896 """Returns bool indicating whether or not the source changes are valid"""
897 # Ensure there is source to check
898 if not self.pkg.changes["architecture"].has_key("source"):
903 for f, entry in self.pkg.files.items():
904 if entry["type"] == "dsc":
906 self.rejects.append("can not process a .changes file with multiple .dsc's.")
911 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
913 self.rejects.append("source uploads must contain a dsc file")
916 # Parse the .dsc file
918 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
919 except CantOpenError:
920 # if not -n copy_to_holding() will have done this for us...
922 self.rejects.append("%s: can't read file." % (dsc_filename))
923 except ParseChangesError, line:
924 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
925 except InvalidDscError, line:
926 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
927 except ChangesUnicodeError:
928 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
930 # Build up the file list of files mentioned by the .dsc
932 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
933 except NoFilesFieldError:
934 self.rejects.append("%s: no Files: field." % (dsc_filename))
936 except UnknownFormatError, format:
937 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
939 except ParseChangesError, line:
940 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
943 # Enforce mandatory fields
944 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
945 if not self.pkg.dsc.has_key(i):
946 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
949 # Validate the source and version fields
950 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
951 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
952 if not re_valid_version.match(self.pkg.dsc["version"]):
953 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
955 # Only a limited list of source formats are allowed in each suite
956 for dist in self.pkg.changes["distribution"].keys():
957 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
958 if self.pkg.dsc["format"] not in allowed:
959 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
961 # Validate the Maintainer field
963 # We ignore the return value
964 fix_maintainer(self.pkg.dsc["maintainer"])
965 except ParseMaintError, msg:
966 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
967 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
969 # Validate the build-depends field(s)
970 for field_name in [ "build-depends", "build-depends-indep" ]:
971 field = self.pkg.dsc.get(field_name)
973 # Have apt try to parse them...
975 apt_pkg.ParseSrcDepends(field)
977 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
979 # Ensure the version number in the .dsc matches the version number in the .changes
980 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
981 changes_version = self.pkg.files[dsc_filename]["version"]
983 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
984 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
986 # Ensure the Files field contains only what's expected
987 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
989 # Ensure source is newer than existing source in target suites
990 session = DBConn().session()
991 self.check_source_against_db(dsc_filename, session)
992 self.check_dsc_against_db(dsc_filename, session)
997 ###########################################################################
999 def get_changelog_versions(self, source_dir):
1000 """Extracts a the source package and (optionally) grabs the
1001 version history out of debian/changelog for the BTS."""
1005 # Find the .dsc (again)
1007 for f in self.pkg.files.keys():
1008 if self.pkg.files[f]["type"] == "dsc":
1011 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1012 if not dsc_filename:
1015 # Create a symlink mirror of the source files in our temporary directory
1016 for f in self.pkg.files.keys():
1017 m = re_issource.match(f)
1019 src = os.path.join(source_dir, f)
1020 # If a file is missing for whatever reason, give up.
1021 if not os.path.exists(src):
1024 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1025 self.pkg.orig_files[f].has_key("path"):
1027 dest = os.path.join(os.getcwd(), f)
1028 os.symlink(src, dest)
1030 # If the orig files are not a part of the upload, create symlinks to the
1032 for orig_file in self.pkg.orig_files.keys():
1033 if not self.pkg.orig_files[orig_file].has_key("path"):
1035 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1036 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1038 # Extract the source
1039 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1040 (result, output) = commands.getstatusoutput(cmd)
1042 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1043 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1046 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1049 # Get the upstream version
1050 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1051 if re_strip_revision.search(upstr_version):
1052 upstr_version = re_strip_revision.sub('', upstr_version)
1054 # Ensure the changelog file exists
1055 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1056 if not os.path.exists(changelog_filename):
1057 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1060 # Parse the changelog
1061 self.pkg.dsc["bts changelog"] = ""
1062 changelog_file = utils.open_file(changelog_filename)
1063 for line in changelog_file.readlines():
1064 m = re_changelog_versions.match(line)
1066 self.pkg.dsc["bts changelog"] += line
1067 changelog_file.close()
1069 # Check we found at least one revision in the changelog
1070 if not self.pkg.dsc["bts changelog"]:
1071 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1073 def check_source(self):
1075 # a) there's no source
1076 # or b) the orig files are MIA
1077 if not self.pkg.changes["architecture"].has_key("source") \
1078 or len(self.pkg.orig_files) == 0:
1081 tmpdir = utils.temp_dirname()
1083 # Move into the temporary directory
1087 # Get the changelog version history
1088 self.get_changelog_versions(cwd)
1090 # Move back and cleanup the temporary tree
1094 shutil.rmtree(tmpdir)
1096 if e.errno != errno.EACCES:
1098 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1100 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1101 # We probably have u-r or u-w directories so chmod everything
1103 cmd = "chmod -R u+rwx %s" % (tmpdir)
1104 result = os.system(cmd)
1106 utils.fubar("'%s' failed with result %s." % (cmd, result))
1107 shutil.rmtree(tmpdir)
1108 except Exception, e:
1109 print "foobar2 (%s)" % e
1110 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1112 ###########################################################################
1113 def ensure_hashes(self):
1114 # Make sure we recognise the format of the Files: field in the .changes
1115 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1116 if len(format) == 2:
1117 format = int(format[0]), int(format[1])
1119 format = int(float(format[0])), 0
1121 # We need to deal with the original changes blob, as the fields we need
1122 # might not be in the changes dict serialised into the .dak anymore.
1123 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1125 # Copy the checksums over to the current changes dict. This will keep
1126 # the existing modifications to it intact.
1127 for field in orig_changes:
1128 if field.startswith('checksums-'):
1129 self.pkg.changes[field] = orig_changes[field]
1131 # Check for unsupported hashes
1132 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1133 self.rejects.append(j)
1135 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1136 self.rejects.append(j)
1138 # We have to calculate the hash ourselves if the changes format is older than
1139 # the one the hash first appeared in, rather than requiring it to exist in the changes file
1140 for hashname, hashfunc, version in utils.known_hashes:
1141 # TODO: Move _ensure_changes_hash into this class
1142 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1143 self.rejects.append(j)
1144 if "source" in self.pkg.changes["architecture"]:
1145 # TODO: Move _ensure_dsc_hash into this class
1146 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1147 self.rejects.append(j)
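# Illustrative sketch only (never called): the Format parsing done at the top
# of ensure_hashes().  "1.8" becomes (1, 8); a bare "1" becomes (1, 0).
def _example_changes_format_parse(self, format_string):
    format = format_string.split(".", 1)
    if len(format) == 2:
        format = int(format[0]), int(format[1])
    else:
        format = int(float(format[0])), 0
    return format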
1149 def check_hashes(self):
1150 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1151 self.rejects.append(m)
1153 for m in utils.check_size(".changes", self.pkg.files):
1154 self.rejects.append(m)
1156 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1157 self.rejects.append(m)
1159 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1160 self.rejects.append(m)
1162 self.ensure_hashes()
1164 ###########################################################################
1166 def ensure_orig(self, target_dir='.', session=None):
1168 Ensures that all orig files mentioned in the changes file are present
1169 in target_dir. If they do not exist, they are symlinked into place.
1171 A list containing the symlinks that were created is returned (so they
1178 for filename, entry in self.pkg.dsc_files.iteritems():
1179 if not re_is_orig_source.match(filename):
1180 # File is not an orig; ignore
1183 if os.path.exists(filename):
1184 # File exists, no need to continue
1187 def symlink_if_valid(path):
1188 f = utils.open_file(path)
1189 md5sum = apt_pkg.md5sum(f)
1192 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1193 expected = (int(entry['size']), entry['md5sum'])
1195 if fingerprint != expected:
1198 dest = os.path.join(target_dir, filename)
1200 os.symlink(path, dest)
1201 symlinked.append(dest)
1207 session_ = DBConn().session()
1212 for poolfile in get_poolfile_like_name('/%s' % filename, session_):
1213 poolfile_path = os.path.join(
1214 poolfile.location.path, poolfile.filename
1217 if symlink_if_valid(poolfile_path):
1227 # Look in some other queues for the file
1228 queues = ('New', 'Byhand', 'ProposedUpdates',
1229 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1231 for queue in queues:
1232 if not cnf.get('Dir::Queue::%s' % queue):
1235 queuefile_path = os.path.join(
1236 cnf['Dir::Queue::%s' % queue], filename
1239 if not os.path.exists(queuefile_path):
1240 # Does not exist in this queue
1243 if symlink_if_valid(queuefile_path):
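# Illustrative sketch only (never called): a caller of ensure_orig() removes
# the symlinks it created once it is done; check_lintian() below does exactly
# this around its lintian run.
def _example_ensure_orig_usage(self):
    symlinked = self.ensure_orig(target_dir='.')
    try:
        pass    # work that needs the complete source next to the upload
    finally:
        for symlink in symlinked:
            os.unlink(symlink)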
1248 ###########################################################################
1250 def check_lintian(self):
1252 Extends self.rejects by checking the output of lintian against tags
1253 specified in Dinstall::LintianTags.
1258 # Don't reject binary uploads
1259 if not self.pkg.changes['architecture'].has_key('source'):
1262 # Only check some distributions
1263 for dist in ('unstable', 'experimental'):
1264 if dist in self.pkg.changes['distribution']:
1269 # If we do not have a tagfile, don't do anything
1270 tagfile = cnf.get("Dinstall::LintianTags")
1274 # Parse the yaml file
1275 sourcefile = file(tagfile, 'r')
1276 sourcecontent = sourcefile.read()
1280 lintiantags = yaml.load(sourcecontent)['lintian']
1281 except yaml.YAMLError, msg:
1282 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1285 # Try and find all orig files mentioned in the .dsc
1286 symlinked = self.ensure_orig()
1288 # Setup the input file for lintian
1289 fd, temp_filename = utils.temp_filename()
1290 temptagfile = os.fdopen(fd, 'w')
1291 for tags in lintiantags.values():
1292 temptagfile.writelines(['%s\n' % x for x in tags])
1296 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1297 (temp_filename, self.pkg.changes_file)
1299 result, output = commands.getstatusoutput(cmd)
1301 # Remove our tempfile and any symlinks we created
1302 os.unlink(temp_filename)
1304 for symlink in symlinked:
1308 utils.warn("lintian failed for %s [return code: %s]." % \
1309 (self.pkg.changes_file, result))
1310 utils.warn(utils.prefix_multi_line_string(output, \
1311 " [possible output:] "))
1316 [self.pkg.changes_file, "check_lintian"] + list(txt)
1320 parsed_tags = parse_lintian_output(output)
1321 self.rejects.extend(
1322 generate_reject_messages(parsed_tags, lintiantags, log=log)
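# Illustrative sketch of the Dinstall::LintianTags file consumed above; the
# category and tag names below are hypothetical examples:
#
#   lintian:
#     nonfatal:
#       - some-warning-tag
#     fatal:
#       - some-error-tag
#
# yaml.load() turns this into a dict under the 'lintian' key; every listed tag
# is written to the temporary --tags-from-file input and any hits are turned
# into rejects by generate_reject_messages().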
1325 ###########################################################################
1326 def check_urgency(self):
1328 if self.pkg.changes["architecture"].has_key("source"):
1329 if not self.pkg.changes.has_key("urgency"):
1330 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1331 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1332 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1333 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1334 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1335 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1337 ###########################################################################
1339 # Sanity check the time stamps of files inside debs.
1340 # [Files in the near future cause ugly warnings and extreme time
1341 # travel can cause errors on extraction]
1343 def check_timestamps(self):
1346 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1347 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1348 tar = TarTime(future_cutoff, past_cutoff)
1350 for filename, entry in self.pkg.files.items():
1351 if entry["type"] == "deb":
1354 deb_file = utils.open_file(filename)
1355 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1358 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1359 except SystemError, e:
1360 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1361 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1364 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1368 future_files = tar.future_files.keys()
1370 num_future_files = len(future_files)
1371 future_file = future_files[0]
1372 future_date = tar.future_files[future_file]
1373 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1374 % (filename, num_future_files, future_file, time.ctime(future_date)))
1376 ancient_files = tar.ancient_files.keys()
1378 num_ancient_files = len(ancient_files)
1379 ancient_file = ancient_files[0]
1380 ancient_date = tar.ancient_files[ancient_file]
1381 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1382 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1384 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1386 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1387 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1389 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1395 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1396 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1397 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1398 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1399 self.pkg.changes["sponsoremail"] = uid_email
1404 ###########################################################################
1405 # check_signed_by_key checks
1406 ###########################################################################
1408 def check_signed_by_key(self):
1409 """Ensure the .changes is signed by an authorized uploader."""
1410 session = DBConn().session()
1412 # First of all we check that the person has proper upload permissions
1413 # and that this upload isn't blocked
1414 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1417 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1420 # TODO: Check that import-keyring adds UIDs properly
1422 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1425 # Check that the fingerprint which uploaded has permission to do so
1426 self.check_upload_permissions(fpr, session)
1428 # Check that this package is not in a transition
1429 self.check_transition(session)
1434 def check_upload_permissions(self, fpr, session):
1435 # Check any one-off upload blocks
1436 self.check_upload_blocks(fpr, session)
1438 # Start with DM as a special case
1439 # DM is a special case unfortunately, so we check it first
1440 # (keys with no source access get more access than DMs in one
1441 # way; DMs can only upload for their packages whether source
1442 # or binary, whereas keys with no access might be able to
1443 # upload some binaries)
1444 if fpr.source_acl.access_level == 'dm':
1445 self.check_dm_upload(fpr, session)
1447 # Check source-based permissions for other types
1448 if self.pkg.changes["architecture"].has_key("source") and \
1449 fpr.source_acl.access_level is None:
1450 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1451 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1452 self.rejects.append(rej)
1454 # If not a DM, we allow full upload rights
1455 uid_email = "%s@debian.org" % (fpr.uid.uid)
1456 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1459 # Check binary upload permissions
1460 # By this point we know that DMs can't have got here unless they
1461 # are allowed to deal with the package concerned so just apply
1463 if fpr.binary_acl.access_level == 'full':
1466 # Otherwise we're in the map case
1467 tmparches = self.pkg.changes["architecture"].copy()
1468 tmparches.pop('source', None)
1470 for bam in fpr.binary_acl_map:
1471 tmparches.pop(bam.architecture.arch_string, None)
1473 if len(tmparches.keys()) > 0:
1474 if fpr.binary_reject:
1475 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1476 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1477 self.rejects.append(rej)
1479 # TODO: This is where we'll implement reject vs throw away binaries later
1480 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1481 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1482 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1483 self.rejects.append(rej)
1486 def check_upload_blocks(self, fpr, session):
1487 """Check whether any upload blocks apply to this source, source
1488 version, uid / fpr combination"""
1490 def block_rej_template(fb):
1491 rej = 'Manual upload block in place for package %s' % fb.source
1492 if fb.version is not None:
1493 rej += ', version %s' % fb.version
1496 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1497 # version is None if the block applies to all versions
1498 if fb.version is None or fb.version == self.pkg.changes['version']:
1499 # Check both fpr and uid - either is enough to cause a reject
1500 if fb.fpr is not None:
1501 if fb.fpr.fingerprint == fpr.fingerprint:
1502 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1503 if fb.uid is not None:
1504 if fb.uid == fpr.uid:
1505 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1508 def check_dm_upload(self, fpr, session):
1509 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1510 ## none of the uploaded packages are NEW
1512 for f in self.pkg.files.keys():
1513 if self.pkg.files[f].has_key("byhand"):
1514 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1516 if self.pkg.files[f].has_key("new"):
1517 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1523 ## the most recent version of the package uploaded to unstable or
1524 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1525 ## section of its control file
1526 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1527 q = q.join(SrcAssociation)
1528 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1529 q = q.order_by(desc('source.version')).limit(1)
1534 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1535 self.rejects.append(rej)
1539 if not r.dm_upload_allowed:
1540 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1541 self.rejects.append(rej)
1544 ## the Maintainer: field of the uploaded .changes file corresponds with
1545 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1547 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1548 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1550 ## the most recent version of the package uploaded to unstable or
1551 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1552 ## non-developer maintainers cannot NMU or hijack packages)
1554 # srcuploaders includes the maintainer
1556 for sup in r.srcuploaders:
1557 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1558 # Eww - I hope we never have two people with the same name in Debian
1559 if email == fpr.uid.uid or name == fpr.uid.name:
1564 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1567 ## none of the packages are being taken over from other source packages
1568 for b in self.pkg.changes["binary"].keys():
1569 for suite in self.pkg.changes["distribution"].keys():
1570 q = session.query(DBSource)
1571 q = q.join(DBBinary).filter_by(package=b)
1572 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1575 if s.source != self.pkg.changes["source"]:
1576 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1580 def check_transition(self, session):
1583 sourcepkg = self.pkg.changes["source"]
1585 # No sourceful upload -> no need to do anything else, direct return
1586 # We also only check unstable uploads here, not experimental or those going to
1587 # some proposed-updates queue
1588 if "source" not in self.pkg.changes["architecture"] or \
1589 "unstable" not in self.pkg.changes["distribution"]:
1594 # Also only check if there is a file defined (and existent) with
1594 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1595 if transpath == "" or not os.path.exists(transpath):
1598 # Parse the yaml file
1599 sourcefile = file(transpath, 'r')
1600 sourcecontent = sourcefile.read()
1602 transitions = yaml.load(sourcecontent)
1603 except yaml.YAMLError, msg:
1604 # This shouldn't happen, there is a wrapper to edit the file which
1605 # checks it, but we prefer being safe rather than ending up rejecting
1607 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1610 # Now look through all defined transitions
1611 for trans in transitions:
1612 t = transitions[trans]
1613 source = t["source"]
1616 # Will be None if nothing is in testing.
1617 current = get_source_in_suite(source, "testing", session)
1618 if current is not None:
1619 compare = apt_pkg.VersionCompare(current.version, expected)
1621 if current is None or compare < 0:
1622 # This is still valid, the current version in testing is older than
1623 # the new version we wait for, or there is none in testing yet
1625 # Check if the source we look at is affected by this.
1626 if sourcepkg in t['packages']:
1627 # The source is affected, lets reject it.
1629 rejectmsg = "%s: part of the %s transition.\n\n" % (
1632 if current is not None:
1633 currentlymsg = "at version %s" % (current.version)
1635 currentlymsg = "not present in testing"
1637 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1639 rejectmsg += "\n".join(textwrap.wrap("""Your package
1640 is part of a testing transition designed to get %s migrated (it is
1641 currently %s, we need version %s). This transition is managed by the
1642 Release Team, and %s is the Release-Team member responsible for it.
1643 Please mail debian-release@lists.debian.org or contact %s directly if you
1644 need further assistance. You might want to upload to experimental until this
1645 transition is done."""
1646 % (source, currentlymsg, expected,t["rm"], t["rm"])))
1648 self.rejects.append(rejectmsg)
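# Illustrative sketch of a release transitions file as read above; the
# transition name, package names, version and contact below are all made up,
# and the 'new' key is assumed to be where 'expected' comes from:
#
#   libfoo-transition:
#     reason: "libfoo2 -> libfoo3 rebuilds"
#     source: libfoo
#     new: 3.0-1
#     rm: Some Release Team Member
#     packages:
#       - bar
#       - baz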
1651 ###########################################################################
1652 # End check_signed_by_key checks
1653 ###########################################################################
1655 def build_summaries(self):
1656 """ Build a summary of changes the upload introduces. """
1658 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1660 short_summary = summary
1662 # This is for direport's benefit...
1663 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1666 summary += "Changes: " + f
1668 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1670 summary += self.announce(short_summary, 0)
1672 return (summary, short_summary)
1674 ###########################################################################
1676 def close_bugs(self, summary, action):
1678 Send mail to close bugs as instructed by the closes field in the changes file.
1679 Also add a line to summary if any work was done.
1681 @type summary: string
1682 @param summary: summary text, as given by L{build_summaries}
1685 @param action: If set to false, no real action will be taken.
1688 @return: summary. If action was taken, extended by the list of closed bugs.
1692 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1694 bugs = self.pkg.changes["closes"].keys()
1700 summary += "Closing bugs: "
1702 summary += "%s " % (bug)
1705 self.Subst["__BUG_NUMBER__"] = bug
1706 if self.pkg.changes["distribution"].has_key("stable"):
1707 self.Subst["__STABLE_WARNING__"] = """
1708 Note that this package is not part of the released stable Debian
1709 distribution. It may have dependencies on other unreleased software,
1710 or other instabilities. Please take care if you wish to install it.
1711 The update will eventually make its way into the next released Debian
1714 self.Subst["__STABLE_WARNING__"] = ""
1715 mail_message = utils.TemplateSubst(self.Subst, template)
1716 utils.send_mail(mail_message)
1718 # Clear up after ourselves
1719 del self.Subst["__BUG_NUMBER__"]
1720 del self.Subst["__STABLE_WARNING__"]
1722 if action and self.logger:
1723 self.logger.log(["closing bugs"] + bugs)
1729 ###########################################################################
1731 def announce(self, short_summary, action):
1733 Send an announce mail about a new upload.
1735 @type short_summary: string
1736 @param short_summary: Short summary text to include in the mail
1739 @param action: if set to false, no real action will be done.
1742 @return: Text string describing the action taken.
1747 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1749 # Only do announcements for source uploads with a recent dpkg-dev installed
1750 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1751 self.pkg.changes["architecture"].has_key("source"):
1757 self.Subst["__SHORT_SUMMARY__"] = short_summary
1759 for dist in self.pkg.changes["distribution"].keys():
1760 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1761 if announce_list == "" or lists_done.has_key(announce_list):
1764 lists_done[announce_list] = 1
1765 summary += "Announcing to %s\n" % (announce_list)
1769 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1770 if cnf.get("Dinstall::TrackingServer") and \
1771 self.pkg.changes["architecture"].has_key("source"):
1772 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1773 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
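# Bcc the per-source address on the tracking server (e.g. the PTS) so the
# announcement also lands in the package's tracking history.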
1775 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1776 utils.send_mail(mail_message)
1778 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1780 if cnf.FindB("Dinstall::CloseBugs"):
1781 summary = self.close_bugs(summary, action)
1783 del self.Subst["__SHORT_SUMMARY__"]
1787 ###########################################################################
1789 def accept (self, summary, short_summary, session=None):
1793 This moves all files referenced from the .changes into the pool,
1794 sends the accepted mail, announces to lists, closes bugs and
1795 also checks for override disparities. If enabled it will write out
1796 the version history for the BTS Version Tracking and will finally call
1799 @type summary: string
1800 @param summary: Summary text
1802 @type short_summary: string
1803 @param short_summary: Short summary
1807 stats = SummaryStats()
1810 self.logger.log(["installing changes", self.pkg.changes_file])
1814 # Add the .dsc file to the DB first
1815 for newfile, entry in self.pkg.files.items():
1816 if entry["type"] == "dsc":
1817 dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
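# The .dsc's component and location id are reused below when .orig files have
# to be copied across components; pfs is assumed to be the pool files added.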
1821 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1822 for newfile, entry in self.pkg.files.items():
1823 if entry["type"] == "deb":
1824 poolfiles.append(add_deb_to_db(self, newfile, session))
1826 # If this is a sourceful diff only upload that is moving
1827 # cross-component we need to copy the .orig files into the new
1828 # component too for the same reasons as above.
1829 if self.pkg.changes["architecture"].has_key("source"):
1830 for orig_file in self.pkg.orig_files.keys():
1831 if not self.pkg.orig_files[orig_file].has_key("id"):
1832 continue # Skip if it's not in the pool
1833 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1834 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1835 continue # Skip if the location didn't change
1838 oldf = get_poolfile_by_id(orig_file_id, session)
1839 old_filename = os.path.join(oldf.location.path, oldf.filename)
1840 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1841 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
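# Remember the old file's size and checksums so the copy can be registered
# in the pool with identical metadata.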
1843 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1845 # TODO: Care about size/md5sum collisions etc
1846 (found, newf) = check_poolfile(new_filename, file_size, file_md5sum, dsc_location_id, session)
1849 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1850 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1852 # TODO: Check that there's only 1 here
1853 source = get_sources_from_name(self.pkg.changes["source"], self.pkg.changes["version"])[0]
1854 dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
1855 dscf.poolfile_id = newf.file_id
1859 poolfiles.append(newf)
1861 # Install the files into the pool
1862 for newfile, entry in self.pkg.files.items():
1863 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1864 utils.move(newfile, destination)
1865 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1866 stats.accept_bytes += float(entry["size"])
1868 # Copy the .changes file across for suites which need it.
1870 for suite_name in self.pkg.changes["distribution"].keys():
1871 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1872 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1874 for dest in copy_changes.keys():
1875 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1877 # We're done - commit the database changes
1879 # Our SQL session will automatically start a new transaction after
1882 # Move the .changes into the 'done' directory
1883 utils.move(self.pkg.changes_file,
1884 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1886 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1887 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
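# The urgency log is presumably consumed by the testing migration scripts to
# decide how long this upload has to age in unstable.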
1889 # Send accept mail, announce to lists, close bugs and check for
1890 # override disparities
1891 if not cnf["Dinstall::Options::No-Mail"]:
1893 self.Subst["__SUITE__"] = ""
1894 self.Subst["__SUMMARY__"] = summary
1895 mail_message = utils.TemplateSubst(self.Subst,
1896 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1897 utils.send_mail(mail_message)
1898 self.announce(short_summary, 1)
1900 ## Helper stuff for DebBugs Version Tracking
1901 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1902 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1903 # the conditionalization on dsc["bts changelog"] should be
1906 # Write out the version history from the changelog
1907 if self.pkg.changes["architecture"].has_key("source") and \
1908 self.pkg.dsc.has_key("bts changelog"):
1910 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1911 version_history = os.fdopen(fd, 'w')
1912 version_history.write(self.pkg.dsc["bts changelog"])
1913 version_history.close()
1914 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1915 self.pkg.changes_file[:-8]+".versions")
1916 os.rename(temp_filename, filename)
1917 os.chmod(filename, 0644)
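# debbugs reads these .versions files so it can tell in which package
# versions a bug was found or fixed.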
1919 # Write out the binary -> source mapping.
1920 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1921 debinfo = os.fdopen(fd, 'w')
1922 for name, entry in sorted(self.pkg.files.items()):
1923 if entry["type"] == "deb":
1924 line = " ".join([entry["package"], entry["version"],
1925 entry["architecture"], entry["source package"],
1926 entry["source version"]])
1927 debinfo.write(line+"\n")
1929 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1930 self.pkg.changes_file[:-8]+".debinfo")
1931 os.rename(temp_filename, filename)
1932 os.chmod(filename, 0644)
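# The .debinfo file gives debbugs the binary -> source mapping needed for the
# same version tracking.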
1936 # Set up our copy queues (e.g. buildd queues)
1937 for suite_name in self.pkg.changes["distribution"].keys():
1938 suite = get_suite(suite_name, session)
1939 for q in suite.copy_queues:
1941 q.add_file_from_pool(f)
1946 stats.accept_count += 1
1948 def check_override(self):
1950 Checks override entries for validity. Mails "Override disparity" warnings,
1951 if that feature is enabled.
1953 Abandons the check if
1954 - override disparity checks are disabled
1955 - mail sending is disabled
1960 # Abandon the check if:
1961 # a) override disparity checks have been disabled
1962 # b) we're not sending mail
1963 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1964 cnf["Dinstall::Options::No-Mail"]:
1967 summary = self.pkg.check_override()
1972 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1975 self.Subst["__SUMMARY__"] = summary
1976 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1977 utils.send_mail(mail_message)
1978 del self.Subst["__SUMMARY__"]
1980 ###########################################################################
1982 def remove(self, from_dir=None):
1984 Used (for instance) in p-u to remove the package from unchecked.
1986 Also removes the package from the holding area.
1988 if from_dir is None:
1989 from_dir = self.pkg.directory
1992 for f in self.pkg.files.keys():
1993 os.unlink(os.path.join(from_dir, f))
1994 if os.path.exists(os.path.join(h.holding_dir, f)):
1995 os.unlink(os.path.join(h.holding_dir, f))
1997 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
1998 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
1999 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2001 ###########################################################################
2003 def move_to_queue (self, queue):
2005 Move files to a destination queue using the permissions in the table
2008 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2009 queue.path, perms=int(queue.change_perms, 8))
2010 for f in self.pkg.files.keys():
2011 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2013 ###########################################################################
2015 def force_reject(self, reject_files):
2017 Forcefully move files from the current directory to the
2018 reject directory. If any file already exists in the reject
2019 directory it will be moved to the morgue to make way for
2023 @param reject_files: file dictionary
2029 for file_entry in reject_files:
2030 # Skip any files which don't exist or which we don't have permission to copy.
2031 if os.access(file_entry, os.R_OK) == 0:
2034 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
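# O_CREAT|O_EXCL makes the open fail with EEXIST if the name is already
# taken, so we never silently overwrite an earlier rejection.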
2037 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2039 # File exists? Let's try and move it to the morgue
2040 if e.errno == errno.EEXIST:
2041 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2043 morgue_file = utils.find_next_free(morgue_file)
2044 except NoFreeFilenameError:
2045 # Something's either gone badly Pete Tong, or
2046 # someone is trying to exploit us.
2047 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2049 utils.move(dest_file, morgue_file, perms=0660)
2051 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2054 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2058 # If we got here, we own the destination file, so we can
2059 # safely overwrite it.
2060 utils.move(file_entry, dest_file, 1, perms=0660)
2063 ###########################################################################
2064 def do_reject (self, manual=0, reject_message="", notes=""):
2066 Reject an upload. If called without a reject message or C{manual} is
2067 true, spawn an editor so the user can write one.
2070 @param manual: manual or automated rejection
2072 @type reject_message: string
2073 @param reject_message: A reject message
2078 # If we weren't given a manual rejection message, spawn an
2079 # editor so the user can add one in...
2080 if manual and not reject_message:
2081 (fd, temp_filename) = utils.temp_filename()
2082 temp_file = os.fdopen(fd, 'w')
2085 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2086 % (note.author, note.version, note.notedate, note.comment))
2088 editor = os.environ.get("EDITOR","vi")
2090 while answer == 'E':
2091 os.system("%s %s" % (editor, temp_filename))
2092 temp_fh = utils.open_file(temp_filename)
2093 reject_message = "".join(temp_fh.readlines())
2095 print "Reject message:"
2096 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2097 prompt = "[R]eject, Edit, Abandon, Quit ?"
2099 while prompt.find(answer) == -1:
2100 answer = utils.our_raw_input(prompt)
2101 m = re_default_answer.search(prompt)
2104 answer = answer[:1].upper()
2105 os.unlink(temp_filename)
2111 print "Rejecting.\n"
2115 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2116 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2118 # Move all the files into the reject directory
2119 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2120 self.force_reject(reject_files)
2122 # If we fail here someone is probably trying to exploit the race
2123 # so let's just raise an exception ...
2124 if os.path.exists(reason_filename):
2125 os.unlink(reason_filename)
2126 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2128 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2132 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2133 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2134 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2135 os.write(reason_fd, reject_message)
2136 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2138 # Build up the rejection email
2139 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2140 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2141 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2142 self.Subst["__REJECT_MESSAGE__"] = ""
2143 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2144 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2145 # Write the rejection email out as the <foo>.reason file
2146 os.write(reason_fd, reject_mail_message)
2148 del self.Subst["__REJECTOR_ADDRESS__"]
2149 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2150 del self.Subst["__CC__"]
2154 # Send the rejection mail if appropriate
2155 if not cnf["Dinstall::Options::No-Mail"]:
2156 utils.send_mail(reject_mail_message)
2159 self.logger.log(["rejected", self.pkg.changes_file])
2163 ################################################################################
2164 def in_override_p(self, package, component, suite, binary_type, filename, session):
2166 Check if a package already has override entries in the DB
2168 @type package: string
2169 @param package: package name
2171 @type component: string
2172 @param component: name of the component
2175 @param suite: name of the suite
2177 @type binary_type: string
2178 @param binary_type: type of the package
2180 @type filename: string
2181 @param filename: filename we check
2183 @return: the database result. But no one cares anyway.
2189 if binary_type == "": # must be source
2190 file_type = "dsc"
2191 else:
2192 file_type = binary_type
2194 # Override suite name; used for example with proposed-updates
2195 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2196 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2198 result = get_override(package, suite, component, file_type, session)
2200 # If checking for a source package fall back on the binary override type
2201 if file_type == "dsc" and len(result) < 1:
2202 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2204 # Remember the section and priority so we can check them later if appropriate
2207 self.pkg.files[filename]["override section"] = result.section.section
2208 self.pkg.files[filename]["override priority"] = result.priority.priority
2213 ################################################################################
2214 def get_anyversion(self, sv_list, suite):
2217 @param sv_list: list of (suite, version) tuples to check
2220 @param suite: suite name
2226 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
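# Consider the suite itself plus any suites configured as enhancing it; the
# loop below keeps the highest version found among them.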
2227 for (s, v) in sv_list:
2228 if s in [ x.lower() for x in anysuite ]:
2229 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2234 ################################################################################
2236 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2239 @param sv_list: list of (suite, version) tuples to check
2241 @type filename: string
2242 @param filename: name of the file being checked (used in reject messages)
2244 @type new_version: string
2245 @param new_version: version of the uploaded package
2247 Ensure versions are newer than existing packages in target
2248 suites and that cross-suite version checking rules as
2249 set out in the conf file are satisfied.
2254 # Check versions for each target suite
2255 for target_suite in self.pkg.changes["distribution"].keys():
2256 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2257 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2259 # Enforce "must be newer than target suite" even if conffile omits it
2260 if target_suite not in must_be_newer_than:
2261 must_be_newer_than.append(target_suite)
2263 for (suite, existent_version) in sv_list:
2264 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
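# vercmp > 0: the uploaded version is newer than the one already in 'suite';
# < 0: older; 0: identical.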
2266 if suite in must_be_newer_than and sourceful and vercmp < 1:
2267 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2269 if suite in must_be_older_than and vercmp > -1:
2272 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2273 # we really use the other suite, ignoring the conflicting one ...
2274 addsuite = self.pkg.changes["distribution-version"][suite]
2276 add_version = self.get_anyversion(sv_list, addsuite)
2277 target_version = self.get_anyversion(sv_list, target_suite)
2280 # not add_version can only happen if we map to a suite
2281 # that doesn't enhance the suite we're propup'ing from.
2282 # so "propup-ver x a b c; map a d" is a problem only if
2283 # d doesn't enhance a.
2285 # i think we could always propagate in this case, rather
2286 # than complaining. either way, this isn't a REJECT issue
2288 # And - we really should complain to the dorks who configured dak
2289 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2290 self.pkg.changes.setdefault("propdistribution", {})
2291 self.pkg.changes["propdistribution"][addsuite] = 1
2293 elif not target_version:
2294 # not target_version is true when the package is NEW
2295 # we could just stick with the "...old version..." REJECT
2296 # for this, I think.
2297 self.rejects.append("Won't propagate NEW packages.")
2298 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2299 # propagation would be redundant. no need to reject though.
2300 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2302 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2303 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2305 self.warnings.append("Propagating upload to %s" % (addsuite))
2306 self.pkg.changes.setdefault("propdistribution", {})
2307 self.pkg.changes["propdistribution"][addsuite] = 1
2311 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2313 ################################################################################
2314 def check_binary_against_db(self, filename, session):
2315 # Ensure version is sane
2316 q = session.query(BinAssociation)
2317 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2318 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
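# 'all' is included so the comparison also covers an existing
# Architecture: all binary of the same package name.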
2320 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2321 filename, self.pkg.files[filename]["version"], sourceful=False)
2323 # Check for any existing copies of the file
2324 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2325 q = q.filter_by(version=self.pkg.files[filename]["version"])
2326 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2329 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2331 ################################################################################
2333 def check_source_against_db(self, filename, session):
2334 source = self.pkg.dsc.get("source")
2335 version = self.pkg.dsc.get("version")
2337 # Ensure version is sane
2338 q = session.query(SrcAssociation)
2339 q = q.join(DBSource).filter(DBSource.source==source)
2341 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2342 filename, version, sourceful=True)
2344 ################################################################################
2345 def check_dsc_against_db(self, filename, session):
2348 @warning: NB: this function can remove entries from the 'files' index [if
2349 the orig tarball is a duplicate of the one in the archive]; if
2350 you're iterating over 'files' and call this function as part of
2351 the loop, be sure to add a check to the top of the loop to
2352 ensure you haven't just tried to dereference the deleted entry.
2357 self.pkg.orig_files = {} # XXX: do we need to clear it?
2358 orig_files = self.pkg.orig_files
2360 # Try and find all files mentioned in the .dsc. This has
2361 # to work harder to cope with the multiple possible
2362 # locations of an .orig.tar.gz.
2363 # The ordering on the select is needed to pick the newest orig
2364 # when it exists in multiple places.
2365 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2367 if self.pkg.files.has_key(dsc_name):
2368 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2369 actual_size = int(self.pkg.files[dsc_name]["size"])
2370 found = "%s in incoming" % (dsc_name)
2372 # Check the file does not already exist in the archive
2373 ql = get_poolfile_like_name(dsc_name, session)
2375 # Strip out anything that isn't '%s' or '/%s$'
2377 if not i.filename.endswith(dsc_name):
2380 # "[dak] has not broken them. [dak] has fixed a
2381 # brokenness. Your crappy hack exploited a bug in
2384 # "(Come on! I thought it was always obvious that
2385 # one just doesn't release different files with
2386 # the same name and version.)"
2387 # -- ajk@ on d-devel@l.d.o
2390 # Ignore exact matches for .orig.tar.gz
2392 if re_is_orig_source.match(dsc_name):
2394 if self.pkg.files.has_key(dsc_name) and \
2395 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2396 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2397 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2398 # TODO: Don't delete the entry, just mark it as not needed
2399 # This would fix the stupidity of changing something we often iterate over
2400 # whilst we're doing it
2401 del self.pkg.files[dsc_name]
2402 dsc_entry["files id"] = i.file_id
2403 if not orig_files.has_key(dsc_name):
2404 orig_files[dsc_name] = {}
2405 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2409 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2411 elif re_is_orig_source.match(dsc_name):
2413 ql = get_poolfile_like_name(dsc_name, session)
2415 # Strip out anything that isn't '%s' or '/%s$'
2416 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2418 if not i.filename.endswith(dsc_name):
2422 # Unfortunately, we may get more than one match here if,
2423 # for example, the package was in potato but had an -sa
2424 # upload in woody. So we need to choose the right one.
2426 # default to something sane in case we don't match any or have only one
2431 old_file = os.path.join(i.location.path, i.filename)
2432 old_file_fh = utils.open_file(old_file)
2433 actual_md5 = apt_pkg.md5sum(old_file_fh)
2435 actual_size = os.stat(old_file)[stat.ST_SIZE]
2436 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2439 old_file = os.path.join(i.location.path, i.filename)
2440 old_file_fh = utils.open_file(old_file)
2441 actual_md5 = apt_pkg.md5sum(old_file_fh)
2443 actual_size = os.stat(old_file)[stat.ST_SIZE]
2445 suite_type = x.location.archive_type
2446 # need this for updating dsc_files in install()
2447 dsc_entry["files id"] = x.file_id
2448 # See install() in process-accepted...
2449 if not orig_files.has_key(dsc_name):
2450 orig_files[dsc_name] = {}
2451 orig_files[dsc_name]["id"] = x.file_id
2452 orig_files[dsc_name]["path"] = old_file
2453 orig_files[dsc_name]["location"] = x.location.location_id
2455 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2456 # Not there? Check the queue directories...
2457 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2458 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2460 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2461 if os.path.exists(in_otherdir):
2462 in_otherdir_fh = utils.open_file(in_otherdir)
2463 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2464 in_otherdir_fh.close()
2465 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2467 if not orig_files.has_key(dsc_name):
2468 orig_files[dsc_name] = {}
2469 orig_files[dsc_name]["path"] = in_otherdir
2472 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2475 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2477 if actual_md5 != dsc_entry["md5sum"]:
2478 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2479 if actual_size != int(dsc_entry["size"]):
2480 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2482 ################################################################################
2483 # This is used by process-new and process-holding to recheck a changes file
2484 # at the time we're running. It mainly wraps various other internal functions
2485 # and is similar to accepted_checks - these should probably be tidied up
2487 def recheck(self, session):
2489 for f in self.pkg.files.keys():
2490 # The .orig.tar.gz can disappear out from under us if it's a
2491 # duplicate of one in the archive.
2492 if not self.pkg.files.has_key(f):
2495 entry = self.pkg.files[f]
2497 # Check that the source still exists
2498 if entry["type"] == "deb":
2499 source_version = entry["source version"]
2500 source_package = entry["source package"]
2501 if not self.pkg.changes["architecture"].has_key("source") \
2502 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2503 source_epochless_version = re_no_epoch.sub('', source_version)
2504 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
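# Epochs never appear in on-disk filenames, hence the epochless version.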
2506 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2507 if cnf.has_key("Dir::Queue::%s" % (q)):
2508 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2511 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2513 # Version and file overwrite checks
2514 if entry["type"] == "deb":
2515 self.check_binary_against_db(f, session)
2516 elif entry["type"] == "dsc":
2517 self.check_source_against_db(f, session)
2518 self.check_dsc_against_db(f, session)
2520 ################################################################################
2521 def accepted_checks(self, overwrite_checks, session):
2522 # Recheck anything that relies on the database; since that's not
2523 # frozen between accept and our run time when called from p-a.
2525 # overwrite_checks is set to False when installing to stable/oldstable
2530 # Find the .dsc (again)
2532 for f in self.pkg.files.keys():
2533 if self.pkg.files[f]["type"] == "dsc":
2536 for checkfile in self.pkg.files.keys():
2537 # The .orig.tar.gz can disappear out from under us if it's a
2538 # duplicate of one in the archive.
2539 if not self.pkg.files.has_key(checkfile):
2542 entry = self.pkg.files[checkfile]
2544 # Check that the source still exists
2545 if entry["type"] == "deb":
2546 source_version = entry["source version"]
2547 source_package = entry["source package"]
2548 if not self.pkg.changes["architecture"].has_key("source") \
2549 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2550 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2552 # Version and file overwrite checks
2553 if overwrite_checks:
2554 if entry["type"] == "deb":
2555 self.check_binary_against_db(checkfile, session)
2556 elif entry["type"] == "dsc":
2557 self.check_source_against_db(checkfile, session)
2558 self.check_dsc_against_db(dsc_filename, session)
2560 # propagate in case it is in the override tables:
2561 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2562 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2563 propogate[suite] = 1
2565 nopropogate[suite] = 1
2567 for suite in propogate.keys():
2568 if suite in nopropogate:
2570 self.pkg.changes["distribution"][suite] = 1
2572 for checkfile in self.pkg.files.keys():
2573 # Check the package is still in the override tables
2574 for suite in self.pkg.changes["distribution"].keys():
2575 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2576 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2578 ################################################################################
2579 # This is not really a reject, but an unaccept, but since a) the code for
2580 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2581 # extremely rare, for now we'll go with whining at our admin folks...
2583 def do_unaccept(self):
2587 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2588 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2589 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2590 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2591 if cnf.has_key("Dinstall::Bcc"):
2592 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2594 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2596 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2598 # Write the rejection email out as the <foo>.reason file
2599 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2600 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2602 # If we fail here someone is probably trying to exploit the race
2603 # so let's just raise an exception ...
2604 if os.path.exists(reject_filename):
2605 os.unlink(reject_filename)
2607 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2608 os.write(fd, reject_mail_message)
2611 utils.send_mail(reject_mail_message)
2613 del self.Subst["__REJECTOR_ADDRESS__"]
2614 del self.Subst["__REJECT_MESSAGE__"]
2615 del self.Subst["__CC__"]
2617 ################################################################################
2618 # If any file of an upload has a recent mtime then chances are good
2619 # the file is still being uploaded.
2621 def upload_too_new(self):
2624 # Move back to the original directory to get accurate time stamps
2626 os.chdir(self.pkg.directory)
2627 file_list = self.pkg.files.keys()
2628 file_list.extend(self.pkg.dsc_files.keys())
2629 file_list.append(self.pkg.changes_file)
2632 last_modified = time.time()-os.path.getmtime(f)
2633 if last_modified < int(cnf["Dinstall::SkipTime"]):
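# Anything modified less than Dinstall::SkipTime seconds ago is treated as a
# still-running upload.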