#!/usr/bin/env python

"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

###############################################################################
import errno
import os
import stat
import sys
import time
import re
import commands
import shutil
import textwrap
import apt_inst
import apt_pkg
import utils

from sqlalchemy.sql.expression import desc
from sqlalchemy.orm.exc import NoResultFound

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from urgencylog import UrgencyLog
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
from lintian import parse_lintian_output, generate_reject_messages

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        file_type = f["type"]
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))

    return file_type
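# Illustrative note (not from the original source): for a files entry such as
# {"dbtype": "deb", ...} get_type() yields "deb" directly, while an entry
# whose "type" matches re_source_ext is treated as source and yields "dsc"
# before the override type is validated against the database.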
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1, session=None):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.
    """
    new = {}
    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
#        if f["type"] == "byhand":
#            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]
    # Fix up the list of target suites
    cnf = Config()
    for suite in changes["suite"].keys():
        override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
        if override:
            (olderr, newerr) = (get_suite(suite, session) == None,
                                get_suite(override, session) == None)
            if olderr or newerr:
                (oinv, ninv) = ("", "")
                if olderr: oinv = "invalid "
                if newerr: ninv = "invalid "
                print "warning: overriding %ssuite %s to %ssuite %s" % (
                    oinv, suite, ninv, override)
            del changes["suite"][suite]
            changes["suite"][override] = 1
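    # Hedged example (assumed configuration values): with an entry such as
    #   Suite::testing-proposed-updates::OverrideSuite "testing";
    # an upload aimed at testing-proposed-updates is re-targeted at testing
    # here, with a warning printed if either suite name is unknown to the
    # database.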
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    return new
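# Illustrative sketch (assumed data, not from the original source): an upload
# introducing a binary "foo" with no existing override entry would come back
# roughly as
#   {'foo': {'priority': 'optional', 'section': 'utils', 'type': 'deb',
#            'component': 'main', 'files': ['foo_1.0-1_amd64.deb']}}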
################################################################################

def check_valid(new, session=None):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name, session)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name, session)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority == "source" and file_type != "dsc") or \
           (priority != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
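# Illustrative note (assumed values): a udeb whose section is not under
# debian-installer, or a .dsc whose priority is not "source", ends up with
# "section id" or "priority id" set to -1 here, which the NEW processing
# treats as an invalid override combination.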
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
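# Usage sketch (assumed values; mirrors check_timestamps() below): collect
# out-of-range mtimes while apt_inst walks a deb's tar members, e.g.
#   tar = TarTime(time.time() + 86400, time.mktime(time.strptime("1975", "%Y")))
#   apt_inst.debExtract(utils.open_file(filename), tar.callback, "control.tar.gz")
#   # tar.future_files / tar.ancient_files then map member names to mtimes.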
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.
    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.later_check_files = []

        self.pkg.reset()
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """
        msgs = (
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        )

        msg = ''
        for title, messages in msgs:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
        msg += '\n\n'

        return msg
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
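        # Illustrative values (assumed, not from the original source): for a
        # sponsored sourceful upload these mappings might end up as
        #   __MAINTAINER_FROM__ = "Jane Uploader <jane@example.org>"   (Changed-By)
        #   __MAINTAINER_TO__   = "Jane Uploader <jane@example.org>, Joe Maintainer <joe@example.org>"
        #   __MAINTAINER__      = "Jane Uploader <jane@example.org>"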
    ###########################################################################
    def load_changes(self, filename):
        """
        Load a changes file and set up a dictionary around it. Also checks for
        mandatory fields within.

        @type filename: string
        @param filename: Changes filename, full path.

        @rtype: boolean
        @return: whether the changes file was valid or not. We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1
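        # Illustrative note (assumed input): "Architecture: source amd64 i386"
        # becomes self.pkg.changes["architecture"] == {'source': 1, 'amd64': 1,
        # 'i386': 1}, so later code can use has_key() membership tests.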
        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["changed-by"], msg))
        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite
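        # Hedged example (assumed config values): SuiteMappings entries are
        # whitespace-separated directives such as
        #   "map stable proposed-updates"
        #   "ignore testing"
        #   "propup-version testing-proposed-updates testing"
        # where the first word selects one of the branches handled above.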
        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:" field
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))
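        # Illustrative note (assumed filename): re_isadeb splits
        # "hello_2.8-4_amd64.deb" into groups ("hello", "2.8-4", "amd64"),
        # which are compared above against the control file's Package,
        # epochless Version and Architecture fields.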
        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], session=session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        if action:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)

            os.chdir(cwd)

        # check we already know the changes file
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)

        session = DBConn().session()

        try:
            dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
            # if in the pool or in a queue other than unchecked, reject
            if (dbc.in_queue is None) \
                   or (dbc.in_queue is not None
                       and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
                self.rejects.append("%s file already known to dak" % base_filename)
        except NoResultFound, e:
            # not known, good
            pass
        has_binaries = False
        has_source = False

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        # Don't directly reject, mark to check later to deal with orig's
                        # we can find in the pool
                        self.later_check_files.append(f)
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package? Assume byhand...
            else:
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################

    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
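        # Hedged example (assumed suite configuration): for "unstable" the
        # allowed list typically contains source formats such as "1.0",
        # "3.0 (quilt)" and "3.0 (native)"; any other Format: value in the
        # .dsc is rejected here.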
        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        # Finally, check if we're missing any files
        for f in self.later_check_files:
            self.rejects.append("Could not find file %s referenced in changes" % f)

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                # Don't symlink an orig file whose copy already lives in the pool
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # Bail out if:
        #    a) there's no source
        if not self.pkg.changes["architecture"].has_key("source"):
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "removal of source tree %s failed (%s)" % (tmpdir, e)
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0
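        # Illustrative note (assumed input): a "Format: 1.8" header parses to
        # the tuple (1, 8); a bare "Format: 2" would take the else branch and
        # parse to (2, 0). The tuple is later compared against the changes
        # version in which each known hash first appeared.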
        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict. This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()

    ###########################################################################
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """
        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
    ###########################################################################

    def check_lintian(self):
        """
        Extends self.rejects by checking the output of lintian against tags
        specified in Dinstall::LintianTags.
        """
        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                break
        else:
            return

        # If we do not have a tagfile, don't do anything
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
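        # Hedged example (assumed category names): the tag file referenced by
        # Dinstall::LintianTags is YAML with a top-level "lintian" key mapping
        # categories to tag lists, roughly
        #   lintian:
        #     fatal:
        #       - binary-in-etc
        #     nonfatal:
        #       - debian-changelog-file-missing
        # Each category's tags are fed to lintian via --tags-from-file below.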
        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Setup the input file for lintian
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.values():
            temptagfile.writelines(['%s\n' % x for x in tags])
        temptagfile.close()

        try:
            cmd = "lintian --show-overrides --tags-from-file %s %s" % \
                (temp_filename, self.pkg.changes_file)

            result, output = commands.getstatusoutput(cmd)
        finally:
            # Remove our tempfile and any symlinks we created
            os.unlink(temp_filename)

            for symlink in symlinked:
                os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        def log(*txt):
            if self.logger:
                self.logger.log(
                    [self.pkg.changes_file, "check_lintian"] + list(txt)
                )

        # Generate reject messages from the parsed lintian output
        parsed_tags = parse_lintian_output(output)
        self.rejects.extend(
            generate_reject_messages(parsed_tags, lintiantags, log=log)
        )
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
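    # Illustrative note (assumed scenario): if the signing key's uid is
    # "sponsor@example.org" while Maintainer and Changed-By both name
    # "maintainer@example.org", the upload is treated as sponsored and
    # changes["sponsoremail"] records the sponsor for the accepted mail.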
    ###########################################################################
    # check_signed_by_key checks
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        # First of all we check that the person has proper upload permissions
        # and that this upload isn't blocked
        fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)

        if fpr is None:
            self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
            return

        # TODO: Check that import-keyring adds UIDs properly
        if not fpr.uid:
            self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
            return

        # Check that the fingerprint which uploaded has permission to do so
        self.check_upload_permissions(fpr, session)

        # Check that this package is not in a transition
        self.check_transition(session)

        session.close()
    def check_upload_permissions(self, fpr, session):
        # Check any one-off upload blocks
        self.check_upload_blocks(fpr, session)

        # Start with DM as a special case
        # DM is a special case unfortunately, so we check it first
        # (keys with no source access get more access than DMs in one
        #  way; DMs can only upload for their packages whether source
        #  or binary, whereas keys with no access might be able to
        #  upload some binaries)
        if fpr.source_acl.access_level == 'dm':
            self.check_dm_upload(fpr, session)
        else:
            # Check source-based permissions for other types
            if self.pkg.changes["architecture"].has_key("source") and \
               fpr.source_acl.access_level is None:
                rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
                rej += '\nPlease contact ftpmaster if you think this is incorrect'
                self.rejects.append(rej)
            # If not a DM, we allow full upload rights
            uid_email = "%s@debian.org" % (fpr.uid.uid)
            self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)

        # Check binary upload permissions
        # By this point we know that DMs can't have got here unless they
        # are allowed to deal with the package concerned so just apply
        # normal checks
        if fpr.binary_acl.access_level == 'full':
            return

        # Otherwise we're in the map case
        tmparches = self.pkg.changes["architecture"].copy()
        tmparches.pop('source', None)

        for bam in fpr.binary_acl_map:
            tmparches.pop(bam.architecture.arch_string, None)

        if len(tmparches.keys()) > 0:
            if fpr.binary_reject:
                rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
                rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
                self.rejects.append(rej)
            else:
                # TODO: This is where we'll implement reject vs throw away binaries later
                rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
                rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
                rej += "\nFingerprint: %s" % (fpr.fingerprint)
                self.rejects.append(rej)
    def check_upload_blocks(self, fpr, session):
        """Check whether any upload blocks apply to this source, source
           version, uid / fpr combination"""

        def block_rej_template(fb):
            rej = 'Manual upload block in place for package %s' % fb.source
            if fb.version is not None:
                rej += ', version %s' % fb.version
            return rej

        for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
            # version is None if the block applies to all versions
            if fb.version is None or fb.version == self.pkg.changes['version']:
                # Check both fpr and uid - either is enough to cause a reject
                if fb.fpr is not None:
                    if fb.fpr.fingerprint == fpr.fingerprint:
                        self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
                if fb.uid is not None:
                    if fb.uid == fpr.uid:
                        self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
    def check_dm_upload(self, fpr, session):
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        rej = False
        for f in self.pkg.files.keys():
            if self.pkg.files[f].has_key("byhand"):
                self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
                rej = True
            if self.pkg.files[f].has_key("new"):
                self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
                rej = True

        if rej:
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental includes the field "DM-Upload-Allowed: yes" in the source
        ## section of its control file
        q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
        q = q.join(SrcAssociation)
        q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
        q = q.order_by(desc('source.version')).limit(1)

        r = q.all()

        if len(r) != 1:
            rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
            self.rejects.append(rej)
            return

        r = r[0]
        if not r.dm_upload_allowed:
            rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
            self.rejects.append(rej)
            return

        ## the Maintainer: field of the uploaded .changes file corresponds with
        ## the owner of the key used (ie, non-developer maintainers may not sponsor
        ## uploads)
        if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
            self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
            return

        ## the most recent version of the package uploaded to unstable or
        ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
        ## non-developer maintainers cannot NMU or hijack packages)

        # srcuploaders includes the maintainer
        accept = False
        for sup in r.srcuploaders:
            (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
            # Eww - I hope we never have two people with the same name in Debian
            if email == fpr.uid.uid or name == fpr.uid.name:
                accept = True
                break

        if not accept:
            self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
            return

        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                q = session.query(DBSource)
                q = q.join(DBBinary).filter_by(package=b)
                q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                for s in q.all():
                    if s.source != self.pkg.changes["source"]:
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existent) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe rather than end up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return
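        # Hedged example (field names taken from the code below; values
        # assumed): the transitions YAML maps a transition name to its
        # details, roughly
        #   apt:
        #     reason: "apt needs to migrate to testing first"
        #     source: apt
        #     new: 0.8.0
        #     rm: "Release Team member"
        #     packages:
        #       - apt
        #       - python-apt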
        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, let's reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s). This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance. You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
    ###########################################################################
    # End check_signed_by_key checks
    ###########################################################################

    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
1687 ###########################################################################
1689 def close_bugs(self, summary, action):
1691 Send mail to close bugs as instructed by the closes field in the changes file.
1692 Also add a line to summary if any work was done.
1694 @type summary: string
1695 @param summary: summary text, as given by L{build_summaries}
1698 @param action: Set to false no real action will be done.
1701 @return: summary. If action was taken, extended by the list of closed bugs.
1705 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1707 bugs = self.pkg.changes["closes"].keys()
1713 summary += "Closing bugs: "
1715 summary += "%s " % (bug)
1718 self.Subst["__BUG_NUMBER__"] = bug
1719 if self.pkg.changes["distribution"].has_key("stable"):
1720 self.Subst["__STABLE_WARNING__"] = """
1721 Note that this package is not part of the released stable Debian
1722 distribution. It may have dependencies on other unreleased software,
1723 or other instabilities. Please take care if you wish to install it.
1724 The update will eventually make its way into the next released Debian
1727 self.Subst["__STABLE_WARNING__"] = ""
1728 mail_message = utils.TemplateSubst(self.Subst, template)
1729 utils.send_mail(mail_message)
1731 # Clear up after ourselves
1732 del self.Subst["__BUG_NUMBER__"]
1733 del self.Subst["__STABLE_WARNING__"]
1735 if action and self.logger:
1736 self.logger.log(["closing bugs"] + bugs)
1742 ###########################################################################
1744 def announce(self, short_summary, action):
1746 Send an announce mail about a new upload.
1748 @type short_summary: string
1749 @param short_summary: Short summary text to include in the mail
@param action: If set to false, no real action will be taken.

@return: Text string describing the action taken.
1760 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1762 # Only do announcements for source uploads with a recent dpkg-dev installed
1763 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1764 self.pkg.changes["architecture"].has_key("source"):
1770 self.Subst["__SHORT_SUMMARY__"] = short_summary
1772 for dist in self.pkg.changes["distribution"].keys():
1773 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1774 if announce_list == "" or lists_done.has_key(announce_list):
1777 lists_done[announce_list] = 1
1778 summary += "Announcing to %s\n" % (announce_list)
1782 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1783 if cnf.get("Dinstall::TrackingServer") and \
1784 self.pkg.changes["architecture"].has_key("source"):
1785 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1786 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
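# e.g. this appends "Bcc: hello@packages.qa.debian.org" when the source is
# "hello" and Dinstall::TrackingServer is "packages.qa.debian.org"
# (illustrative values, not taken from any real config).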
1788 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1789 utils.send_mail(mail_message)
1791 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1793 if cnf.FindB("Dinstall::CloseBugs"):
1794 summary = self.close_bugs(summary, action)
1796 del self.Subst["__SHORT_SUMMARY__"]
1800 ###########################################################################
1802 def accept (self, summary, short_summary, session=None):
1806 This moves all files referenced from the .changes into the pool,
1807 sends the accepted mail, announces to lists, closes bugs and
also checks for override disparities. If enabled, it will write out
the version history for BTS Version Tracking and will finally call
1812 @type summary: string
1813 @param summary: Summary text
1815 @type short_summary: string
1816 @param short_summary: Short summary
1820 stats = SummaryStats()
1823 self.logger.log(["installing changes", self.pkg.changes_file])
1827 # Add the .dsc file to the DB first
1828 for newfile, entry in self.pkg.files.items():
1829 if entry["type"] == "dsc":
1830 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
1834 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1835 for newfile, entry in self.pkg.files.items():
1836 if entry["type"] == "deb":
1837 poolfiles.append(add_deb_to_db(self, newfile, session))
# If this is a sourceful, diff-only upload that is moving
1840 # cross-component we need to copy the .orig files into the new
1841 # component too for the same reasons as above.
1842 # XXX: mhy: I think this should be in add_dsc_to_db
1843 if self.pkg.changes["architecture"].has_key("source"):
1844 for orig_file in self.pkg.orig_files.keys():
1845 if not self.pkg.orig_files[orig_file].has_key("id"):
1846 continue # Skip if it's not in the pool
1847 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1848 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1849 continue # Skip if the location didn't change
1852 oldf = get_poolfile_by_id(orig_file_id, session)
1853 old_filename = os.path.join(oldf.location.path, oldf.filename)
1854 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1855 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
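# Snapshot the size and checksums of the existing pool file; the
# check_poolfile()/add_poolfile() calls below need them for the copy.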
1857 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
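# utils.poolify() yields the pool subdirectory for a source/component
# pair, e.g. "pool/main/h/hello/" (illustrative example).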
1859 # TODO: Care about size/md5sum collisions etc
1860 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1862 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
1864 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1865 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
# Don't reference the old file from this .changes file
1871 if p.file_id == oldf.file_id:
1874 poolfiles.append(newf)
1876 # Fix up the DSC references
1879 for df in source.srcfiles:
1880 if df.poolfile.file_id == oldf.file_id:
1881 # Add a new DSC entry and mark the old one for deletion
1882 # Don't do it in the loop so we don't change the thing we're iterating over
1884 newdscf.source_id = source.source_id
1885 newdscf.poolfile_id = newf.file_id
1886 session.add(newdscf)
1896 # Make sure that our source object is up-to-date
1897 session.expire(source)
1899 # Add changelog information to the database
1900 self.store_changelog()
1902 # Install the files into the pool
1903 for newfile, entry in self.pkg.files.items():
1904 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1905 utils.move(newfile, destination)
1906 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1907 stats.accept_bytes += float(entry["size"])
# Copy the .changes file across for suites which need it.
1911 for suite_name in self.pkg.changes["distribution"].keys():
1912 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1913 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1915 for dest in copy_changes.keys():
1916 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1918 # We're done - commit the database changes
1920 # Our SQL session will automatically start a new transaction after
1923 # Move the .changes into the 'done' directory
1924 utils.move(self.pkg.changes_file,
1925 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1927 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1928 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1931 self.Subst["__SUITE__"] = ""
1932 self.Subst["__SUMMARY__"] = summary
1933 mail_message = utils.TemplateSubst(self.Subst,
1934 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1935 utils.send_mail(mail_message)
1936 self.announce(short_summary, 1)
1938 ## Helper stuff for DebBugs Version Tracking
1939 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1940 if self.pkg.changes["architecture"].has_key("source"):
1941 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1942 version_history = os.fdopen(fd, 'w')
1943 version_history.write(self.pkg.dsc["bts changelog"])
1944 version_history.close()
1945 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1946 self.pkg.changes_file[:-8]+".versions")
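# changes_file[:-8] strips the trailing ".changes" (8 characters)
# before the new extension is appended.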
1947 os.rename(temp_filename, filename)
1948 os.chmod(filename, 0644)
1950 # Write out the binary -> source mapping.
1951 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1952 debinfo = os.fdopen(fd, 'w')
1953 for name, entry in sorted(self.pkg.files.items()):
1954 if entry["type"] == "deb":
1955 line = " ".join([entry["package"], entry["version"],
1956 entry["architecture"], entry["source package"],
1957 entry["source version"]])
1958 debinfo.write(line+"\n")
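# One line per binary: "package version architecture sourcepackage
# sourceversion", e.g. "hello 2.10-1 amd64 hello 2.10-1" (illustrative).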
1960 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1961 self.pkg.changes_file[:-8]+".debinfo")
1962 os.rename(temp_filename, filename)
1963 os.chmod(filename, 0644)
1967 # Set up our copy queues (e.g. buildd queues)
1968 for suite_name in self.pkg.changes["distribution"].keys():
1969 suite = get_suite(suite_name, session)
1970 for q in suite.copy_queues:
1972 q.add_file_from_pool(f)
1977 stats.accept_count += 1
1979 def check_override(self):
Checks override entries for validity. Mails "Override disparity" warnings
if that feature is enabled.
1984 Abandons the check if
1985 - override disparity checks are disabled
1986 - mail sending is disabled
1991 # Abandon the check if override disparity checks have been disabled
1992 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
1995 summary = self.pkg.check_override()
2000 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2003 self.Subst["__SUMMARY__"] = summary
2004 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2005 utils.send_mail(mail_message)
2006 del self.Subst["__SUMMARY__"]
2008 ###########################################################################
2010 def remove(self, from_dir=None):
2012 Used (for instance) in p-u to remove the package from unchecked
Also removes the package from the holding area.
2016 if from_dir is None:
2017 from_dir = self.pkg.directory
2020 for f in self.pkg.files.keys():
2021 os.unlink(os.path.join(from_dir, f))
2022 if os.path.exists(os.path.join(h.holding_dir, f)):
2023 os.unlink(os.path.join(h.holding_dir, f))
2025 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2026 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2027 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2029 ###########################################################################
2031 def move_to_queue (self, queue):
2033 Move files to a destination queue using the permissions in the table
2036 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2037 queue.path, perms=int(queue.change_perms, 8))
2038 for f in self.pkg.files.keys():
2039 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
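# queue.perms and queue.change_perms are octal permission strings
# (e.g. "0644"); int(x, 8) parses them for utils.move().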
2041 ###########################################################################
2043 def force_reject(self, reject_files):
2045 Forcefully move files from the current directory to the
2046 reject directory. If any file already exists in the reject
2047 directory it will be moved to the morgue to make way for
2050 @type reject_files: dict
2051 @param reject_files: file dictionary
2057 for file_entry in reject_files:
2058 # Skip any files which don't exist or which we don't have permission to copy.
if not os.access(file_entry, os.R_OK):
2062 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
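# Try to claim the destination atomically: O_CREAT|O_EXCL makes
# os.open() fail with EEXIST if the file already exists.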
2065 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2067 # File exists? Let's find a new name by adding a number
2068 if e.errno == errno.EEXIST:
2070 dest_file = utils.find_next_free(dest_file, 255)
2071 except NoFreeFilenameError:
2072 # Something's either gone badly Pete Tong, or
2073 # someone is trying to exploit us.
2074 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2077 # Make sure we really got it
2079 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2082 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2086 # If we got here, we own the destination file, so we can
2087 # safely overwrite it.
2088 utils.move(file_entry, dest_file, 1, perms=0660)
2091 ###########################################################################
2092 def do_reject (self, manual=0, reject_message="", notes=""):
2094 Reject an upload. If called without a reject message or C{manual} is
2095 true, spawn an editor so the user can write one.
2098 @param manual: manual or automated rejection
2100 @type reject_message: string
2101 @param reject_message: A reject message
2106 # If we weren't given a manual rejection message, spawn an
2107 # editor so the user can add one in...
2108 if manual and not reject_message:
2109 (fd, temp_filename) = utils.temp_filename()
2110 temp_file = os.fdopen(fd, 'w')
2113 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2114 % (note.author, note.version, note.notedate, note.comment))
2116 editor = os.environ.get("EDITOR","vi")
2118 while answer == 'E':
2119 os.system("%s %s" % (editor, temp_filename))
2120 temp_fh = utils.open_file(temp_filename)
2121 reject_message = "".join(temp_fh.readlines())
2123 print "Reject message:"
print utils.prefix_multi_line_string(reject_message, " ", include_blank_lines=1)
2125 prompt = "[R]eject, Edit, Abandon, Quit ?"
2127 while prompt.find(answer) == -1:
2128 answer = utils.our_raw_input(prompt)
2129 m = re_default_answer.search(prompt)
2132 answer = answer[:1].upper()
2133 os.unlink(temp_filename)
2139 print "Rejecting.\n"
2143 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2144 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2146 # Move all the files into the reject directory
2147 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2148 self.force_reject(reject_files)
2150 # If we fail here someone is probably trying to exploit the race
2151 # so let's just raise an exception ...
2152 if os.path.exists(reason_filename):
2153 os.unlink(reason_filename)
2154 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2156 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2160 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2161 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2162 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2163 os.write(reason_fd, reject_message)
2164 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2166 # Build up the rejection email
2167 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2168 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2169 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2170 self.Subst["__REJECT_MESSAGE__"] = ""
2171 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2172 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2173 # Write the rejection email out as the <foo>.reason file
2174 os.write(reason_fd, reject_mail_message)
2176 del self.Subst["__REJECTOR_ADDRESS__"]
2177 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2178 del self.Subst["__CC__"]
2182 # Send the rejection mail
2183 utils.send_mail(reject_mail_message)
2186 self.logger.log(["rejected", self.pkg.changes_file])
2190 ################################################################################
2191 def in_override_p(self, package, component, suite, binary_type, filename, session):
2193 Check if a package already has override entries in the DB
2195 @type package: string
2196 @param package: package name
2198 @type component: string
2199 @param component: database id of the component
2202 @param suite: database id of the suite
2204 @type binary_type: string
2205 @param binary_type: type of the package
2207 @type filename: string
2208 @param filename: filename we check
@return: the database result. But no one cares anyway.
2216 if binary_type == "": # must be source
2219 file_type = binary_type
2221 # Override suite name; used for example with proposed-updates
2222 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2223 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2225 result = get_override(package, suite, component, file_type, session)
# If checking for a source package, fall back on the binary override type
2228 if file_type == "dsc" and len(result) < 1:
2229 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2231 # Remember the section and priority so we can check them later if appropriate
2234 self.pkg.files[filename]["override section"] = result.section.section
2235 self.pkg.files[filename]["override priority"] = result.priority.priority
2240 ################################################################################
2241 def get_anyversion(self, sv_list, suite):
2244 @param sv_list: list of (suite, version) tuples to check
2247 @param suite: suite name
2253 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
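# Consider the suite itself plus any suites listed in its
# VersionChecks::Enhances stanza.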
2254 for (s, v) in sv_list:
2255 if s in [ x.lower() for x in anysuite ]:
2256 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2261 ################################################################################
2263 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2266 @param sv_list: list of (suite, version) tuples to check
@type filename: string
@param filename: name of the file whose versions are being checked; used in reject messages

@type new_version: string
@param new_version: version of the package being checked against the archive
2274 Ensure versions are newer than existing packages in target
2275 suites and that cross-suite version checking rules as
2276 set out in the conf file are satisfied.
2281 # Check versions for each target suite
2282 for target_suite in self.pkg.changes["distribution"].keys():
2283 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2284 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2286 # Enforce "must be newer than target suite" even if conffile omits it
2287 if target_suite not in must_be_newer_than:
2288 must_be_newer_than.append(target_suite)
2290 for (suite, existent_version) in sv_list:
2291 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
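# vercmp < 1 (i.e. <= 0) means new_version <= existent_version;
# vercmp > -1 (i.e. >= 0) means new_version >= existent_version.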
2293 if suite in must_be_newer_than and sourceful and vercmp < 1:
2294 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2296 if suite in must_be_older_than and vercmp > -1:
2299 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2300 # we really use the other suite, ignoring the conflicting one ...
2301 addsuite = self.pkg.changes["distribution-version"][suite]
2303 add_version = self.get_anyversion(sv_list, addsuite)
2304 target_version = self.get_anyversion(sv_list, target_suite)
2307 # not add_version can only happen if we map to a suite
2308 # that doesn't enhance the suite we're propup'ing from.
2309 # so "propup-ver x a b c; map a d" is a problem only if
2310 # d doesn't enhance a.
2312 # i think we could always propagate in this case, rather
2313 # than complaining. either way, this isn't a REJECT issue
2315 # And - we really should complain to the dorks who configured dak
self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2317 self.pkg.changes.setdefault("propdistribution", {})
2318 self.pkg.changes["propdistribution"][addsuite] = 1
2320 elif not target_version:
# not target_version is true when the package is NEW
2322 # we could just stick with the "...old version..." REJECT
2323 # for this, I think.
self.rejects.append("Won't propagate NEW packages.")
2325 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
# propagation would be redundant. no need to reject though.
self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2329 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2330 apt_pkg.VersionCompare(add_version, target_version) >= 0:
self.warnings.append("Propagating upload to %s" % (addsuite))
2333 self.pkg.changes.setdefault("propdistribution", {})
2334 self.pkg.changes["propdistribution"][addsuite] = 1
2338 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2340 ################################################################################
2341 def check_binary_against_db(self, filename, session):
2342 # Ensure version is sane
2343 q = session.query(BinAssociation)
2344 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2345 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
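# One row per (suite, version) under which this binary package already
# exists on this architecture (or 'all').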
2347 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2348 filename, self.pkg.files[filename]["version"], sourceful=False)
2350 # Check for any existing copies of the file
2351 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2352 q = q.filter_by(version=self.pkg.files[filename]["version"])
2353 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
self.rejects.append("%s: cannot overwrite existing copy already in the archive." % filename)
2358 ################################################################################
2360 def check_source_against_db(self, filename, session):
2361 source = self.pkg.dsc.get("source")
2362 version = self.pkg.dsc.get("version")
2364 # Ensure version is sane
2365 q = session.query(SrcAssociation)
2366 q = q.join(DBSource).filter(DBSource.source==source)
2368 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2369 filename, version, sourceful=True)
2371 ################################################################################
2372 def check_dsc_against_db(self, filename, session):
2375 @warning: NB: this function can remove entries from the 'files' index [if
2376 the orig tarball is a duplicate of the one in the archive]; if
2377 you're iterating over 'files' and call this function as part of
2378 the loop, be sure to add a check to the top of the loop to
2379 ensure you haven't just tried to dereference the deleted entry.
2384 self.pkg.orig_files = {} # XXX: do we need to clear it?
2385 orig_files = self.pkg.orig_files
2387 # Try and find all files mentioned in the .dsc. This has
2388 # to work harder to cope with the multiple possible
2389 # locations of an .orig.tar.gz.
2390 # The ordering on the select is needed to pick the newest orig
2391 # when it exists in multiple places.
2392 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2394 if self.pkg.files.has_key(dsc_name):
2395 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2396 actual_size = int(self.pkg.files[dsc_name]["size"])
2397 found = "%s in incoming" % (dsc_name)
2399 # Check the file does not already exist in the archive
2400 ql = get_poolfile_like_name(dsc_name, session)
2402 # Strip out anything that isn't '%s' or '/%s$'
2404 if not i.filename.endswith(dsc_name):
2407 # "[dak] has not broken them. [dak] has fixed a
2408 # brokenness. Your crappy hack exploited a bug in
2411 # "(Come on! I thought it was always obvious that
2412 # one just doesn't release different files with
2413 # the same name and version.)"
2414 # -- ajk@ on d-devel@l.d.o
2417 # Ignore exact matches for .orig.tar.gz
2419 if re_is_orig_source.match(dsc_name):
2421 if self.pkg.files.has_key(dsc_name) and \
2422 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2423 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2424 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2425 # TODO: Don't delete the entry, just mark it as not needed
2426 # This would fix the stupidity of changing something we often iterate over
2427 # whilst we're doing it
2428 del self.pkg.files[dsc_name]
2429 dsc_entry["files id"] = i.file_id
2430 if not orig_files.has_key(dsc_name):
2431 orig_files[dsc_name] = {}
2432 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2435 # Don't bitch that we couldn't find this file later
2437 self.later_check_files.remove(dsc_name)
self.rejects.append("cannot overwrite existing copy of '%s' already in the archive." % (dsc_name))
2445 elif re_is_orig_source.match(dsc_name):
2447 ql = get_poolfile_like_name(dsc_name, session)
2449 # Strip out anything that isn't '%s' or '/%s$'
2450 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2452 if not i.filename.endswith(dsc_name):
2456 # Unfortunately, we may get more than one match here if,
2457 # for example, the package was in potato but had an -sa
2458 # upload in woody. So we need to choose the right one.
2460 # default to something sane in case we don't match any or have only one
2465 old_file = os.path.join(i.location.path, i.filename)
2466 old_file_fh = utils.open_file(old_file)
2467 actual_md5 = apt_pkg.md5sum(old_file_fh)
2469 actual_size = os.stat(old_file)[stat.ST_SIZE]
2470 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2473 old_file = os.path.join(i.location.path, i.filename)
2474 old_file_fh = utils.open_file(old_file)
2475 actual_md5 = apt_pkg.md5sum(old_file_fh)
2477 actual_size = os.stat(old_file)[stat.ST_SIZE]
2479 suite_type = x.location.archive_type
2480 # need this for updating dsc_files in install()
2481 dsc_entry["files id"] = x.file_id
2482 # See install() in process-accepted...
2483 if not orig_files.has_key(dsc_name):
2484 orig_files[dsc_name] = {}
2485 orig_files[dsc_name]["id"] = x.file_id
2486 orig_files[dsc_name]["path"] = old_file
2487 orig_files[dsc_name]["location"] = x.location.location_id
2489 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2490 # Not there? Check the queue directories...
2491 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2492 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2494 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2495 if os.path.exists(in_otherdir):
2496 in_otherdir_fh = utils.open_file(in_otherdir)
2497 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2498 in_otherdir_fh.close()
2499 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2501 if not orig_files.has_key(dsc_name):
2502 orig_files[dsc_name] = {}
2503 orig_files[dsc_name]["path"] = in_otherdir
2506 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2509 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2511 if actual_md5 != dsc_entry["md5sum"]:
2512 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2513 if actual_size != int(dsc_entry["size"]):
2514 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2516 ################################################################################
2517 # This is used by process-new and process-holding to recheck a changes file
2518 # at the time we're running. It mainly wraps various other internal functions
2519 # and is similar to accepted_checks - these should probably be tidied up
2521 def recheck(self, session):
2523 for f in self.pkg.files.keys():
# The .orig.tar.gz can disappear out from under us if it's a
# duplicate of one in the archive.
2526 if not self.pkg.files.has_key(f):
2529 entry = self.pkg.files[f]
2531 # Check that the source still exists
2532 if entry["type"] == "deb":
2533 source_version = entry["source version"]
2534 source_package = entry["source package"]
2535 if not self.pkg.changes["architecture"].has_key("source") \
2536 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2537 source_epochless_version = re_no_epoch.sub('', source_version)
2538 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2540 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2541 if cnf.has_key("Dir::Queue::%s" % (q)):
2542 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2545 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2547 # Version and file overwrite checks
2548 if entry["type"] == "deb":
2549 self.check_binary_against_db(f, session)
2550 elif entry["type"] == "dsc":
2551 self.check_source_against_db(f, session)
2552 self.check_dsc_against_db(f, session)
2554 ################################################################################
2555 def accepted_checks(self, overwrite_checks, session):
# Recheck anything that relies on the database, since that's not
# frozen between accept and our run time when called from p-a.
2559 # overwrite_checks is set to False when installing to stable/oldstable
2564 # Find the .dsc (again)
2566 for f in self.pkg.files.keys():
2567 if self.pkg.files[f]["type"] == "dsc":
2570 for checkfile in self.pkg.files.keys():
# The .orig.tar.gz can disappear out from under us if it's a
# duplicate of one in the archive.
2573 if not self.pkg.files.has_key(checkfile):
2576 entry = self.pkg.files[checkfile]
2578 # Check that the source still exists
2579 if entry["type"] == "deb":
2580 source_version = entry["source version"]
2581 source_package = entry["source package"]
2582 if not self.pkg.changes["architecture"].has_key("source") \
2583 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2584 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2586 # Version and file overwrite checks
2587 if overwrite_checks:
2588 if entry["type"] == "deb":
2589 self.check_binary_against_db(checkfile, session)
2590 elif entry["type"] == "dsc":
2591 self.check_source_against_db(checkfile, session)
2592 self.check_dsc_against_db(dsc_filename, session)
# Propagate in case the package is in the override tables:
2595 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2596 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2597 propogate[suite] = 1
2599 nopropogate[suite] = 1
2601 for suite in propogate.keys():
2602 if suite in nopropogate:
2604 self.pkg.changes["distribution"][suite] = 1
for checkfile in self.pkg.files.keys():
    # Look the entry up per file; previously this reused the stale
    # `entry` left over from the loop above.
    entry = self.pkg.files[checkfile]
    # Check the package is still in the override tables
    for suite in self.pkg.changes["distribution"].keys():
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
            self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2612 ################################################################################
2613 # This is not really a reject, but an unaccept, but since a) the code for
2614 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2615 # extremely rare, for now we'll go with whining at our admin folks...
2617 def do_unaccept(self):
2621 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2622 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2623 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2624 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2625 if cnf.has_key("Dinstall::Bcc"):
2626 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2628 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2630 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2632 # Write the rejection email out as the <foo>.reason file
2633 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2634 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2636 # If we fail here someone is probably trying to exploit the race
2637 # so let's just raise an exception ...
2638 if os.path.exists(reject_filename):
2639 os.unlink(reject_filename)
2641 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2642 os.write(fd, reject_mail_message)
2645 utils.send_mail(reject_mail_message)
2647 del self.Subst["__REJECTOR_ADDRESS__"]
2648 del self.Subst["__REJECT_MESSAGE__"]
2649 del self.Subst["__CC__"]
2651 ################################################################################
2652 # If any file of an upload has a recent mtime then chances are good
2653 # the file is still being uploaded.
2655 def upload_too_new(self):
2658 # Move back to the original directory to get accurate time stamps
2660 os.chdir(self.pkg.directory)
2661 file_list = self.pkg.files.keys()
2662 file_list.extend(self.pkg.dsc_files.keys())
2663 file_list.append(self.pkg.changes_file)
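# Any file modified less than Dinstall::SkipTime seconds ago is
# assumed to still be in transit.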
last_modified = time.time() - os.path.getmtime(f)
2667 if last_modified < int(cnf["Dinstall::SkipTime"]):
2676 def store_changelog(self):
2678 # Skip binary-only upload if it is not a bin-NMU
2679 if not self.pkg.changes['architecture'].has_key('source'):
2680 from daklib.regexes import re_bin_only_nmu
2681 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2684 session = DBConn().session()
2686 # Check if upload already has a changelog entry
2687 query = """SELECT changelog_id FROM changes WHERE source = :source
2688 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2689 if session.execute(query, {'source': self.pkg.changes['source'], \
2690 'version': self.pkg.changes['version'], \
2691 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2695 # Add current changelog text into changelogs_text table, return created ID
2696 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2697 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2699 # Link ID to the upload available in changes table
2700 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2701 AND version = :version AND architecture = :architecture"""
2702 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2703 'version': self.pkg.changes['version'], \
2704 'architecture': " ".join(self.pkg.changes['architecture'].keys())})