5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
57 from lintian import parse_lintian_output, generate_reject_messages
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
81 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
83 # Validate the override type
84 type_id = get_override_type(file_type, session)
86 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
90 ################################################################################
92 # Determine what parts in a .changes are NEW
94 def determine_new(changes, files, warn=1, session = None):
96 Determine what parts in a C{changes} file are NEW.
98 @type changes: Upload.Pkg.changes dict
99 @param changes: Changes dictionary
101 @type files: Upload.Pkg.files dict
102 @param files: Files dictionary
105 @param warn: Warn if overrides are added for (old)stable
108 @return: dictionary of NEW components.
113 # Build up a list of potentially new things
114 for name, f in files.items():
115 # Skip byhand elements
116 # if f["type"] == "byhand":
119 priority = f["priority"]
120 section = f["section"]
121 file_type = get_type(f, session)
122 component = f["component"]
124 if file_type == "dsc":
127 if not new.has_key(pkg):
129 new[pkg]["priority"] = priority
130 new[pkg]["section"] = section
131 new[pkg]["type"] = file_type
132 new[pkg]["component"] = component
133 new[pkg]["files"] = []
135 old_type = new[pkg]["type"]
136 if old_type != file_type:
137 # source gets trumped by deb or udeb
138 if old_type == "dsc":
139 new[pkg]["priority"] = priority
140 new[pkg]["section"] = section
141 new[pkg]["type"] = file_type
142 new[pkg]["component"] = component
144 new[pkg]["files"].append(name)
146 if f.has_key("othercomponents"):
147 new[pkg]["othercomponents"] = f["othercomponents"]
149 # Fix up the list of target suites
151 for suite in changes["suite"].keys():
152 override = cnf.Find("Suite::%s::OverrideSuite" % (suite))
154 (olderr, newerr) = (get_suite(suite, session) == None,
155 get_suite(override, session) == None)
157 (oinv, ninv) = ("", "")
158 if olderr: oinv = "invalid "
159 if newerr: ninv = "invalid "
160 print "warning: overriding %ssuite %s to %ssuite %s" % (
161 oinv, suite, ninv, override)
162 del changes["suite"][suite]
163 changes["suite"][override] = 1
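# For each remaining target suite, look up existing overrides; files that
# already have an override entry are not NEW, so their "new" flag is
# cleared below.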
165 for suite in changes["suite"].keys():
166 for pkg in new.keys():
167 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
169 for file_entry in new[pkg]["files"]:
170 if files[file_entry].has_key("new"):
171 del files[file_entry]["new"]
175 for s in ['stable', 'oldstable']:
176 if changes["suite"].has_key(s):
177 print "WARNING: overrides will be added for %s!" % s
178 for pkg in new.keys():
179 if new[pkg].has_key("othercomponents"):
180 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
184 ################################################################################
186 def check_valid(new, session = None):
188 Check if section and priority for NEW packages exist in database.
189 Additionally does sanity checks:
190 - debian-installer packages have to be udeb (or source)
191 - non debian-installer packages can not be udeb
192 - source priority can only be assigned to dsc file types
195 @param new: Dict of new packages with their section, priority and type.
198 for pkg in new.keys():
199 section_name = new[pkg]["section"]
200 priority_name = new[pkg]["priority"]
201 file_type = new[pkg]["type"]
203 section = get_section(section_name, session)
205 new[pkg]["section id"] = -1
207 new[pkg]["section id"] = section.section_id
209 priority = get_priority(priority_name, session)
211 new[pkg]["priority id"] = -1
213 new[pkg]["priority id"] = priority.priority_id
216 di = section_name.find("debian-installer") != -1
218 # If d-i, we must be udeb and vice-versa
219 if (di and file_type not in ("udeb", "dsc")) or \
220 (not di and file_type == "udeb"):
221 new[pkg]["section id"] = -1
223 # If dsc we need to be source and vice-versa
224 if (priority_name == "source" and file_type != "dsc") or \
225 (priority_name != "source" and file_type == "dsc"):
226 new[pkg]["priority id"] = -1
228 ###############################################################################
230 # Used by Upload.check_timestamps
231 class TarTime(object):
232 def __init__(self, future_cutoff, past_cutoff):
234 self.future_cutoff = future_cutoff
235 self.past_cutoff = past_cutoff
238 self.future_files = {}
239 self.ancient_files = {}
241 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
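# Called by apt_inst.debExtract once per tar member (see Upload.check_timestamps);
# records members whose mtime falls outside the [past_cutoff, future_cutoff] window.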
242 if MTime > self.future_cutoff:
243 self.future_files[Name] = MTime
244 if MTime < self.past_cutoff:
245 self.ancient_files[Name] = MTime
247 ###############################################################################
249 class Upload(object):
251 Everything that has to do with processing an upload.
259 ###########################################################################
262 """ Reset a number of internal variables."""
264 # Initialize the substitution template map
267 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
268 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
269 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
270 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
276 self.later_check_files = []
280 def package_info(self):
282 Format various messages from this Upload to send to the maintainer.
286 ('Reject Reasons', self.rejects),
287 ('Warnings', self.warnings),
288 ('Notes', self.notes),
292 for title, messages in msgs:
294 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
299 ###########################################################################
300 def update_subst(self):
301 """ Set up the per-package template substitution mappings """
305 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
306 if not self.pkg.changes.has_key("architecture") or not \
307 isinstance(self.pkg.changes["architecture"], dict):
308 self.pkg.changes["architecture"] = { "Unknown" : "" }
310 # and maintainer2047 may not exist.
311 if not self.pkg.changes.has_key("maintainer2047"):
312 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
314 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
315 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
316 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
318 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
319 if self.pkg.changes["architecture"].has_key("source") and \
320 self.pkg.changes["changedby822"] != "" and \
321 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
323 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
324 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
325 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
327 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
328 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
329 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
331 if "sponsoremail" in self.pkg.changes:
332 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
334 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
335 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
337 # Apply any global override of the Maintainer field
338 if cnf.get("Dinstall::OverrideMaintainer"):
339 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
340 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
342 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
343 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
344 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
346 ###########################################################################
347 def load_changes(self, filename):
349 Load a changes file and set up a dictionary around it. Also checks for mandatory
352 @type filename: string
353 @param filename: Changes filename, full path.
356 @return: whether the changes file was valid or not. We may want to
357 reject even if this is True (see what gets put in self.rejects).
358 This is simply to prevent us even trying things later which will
359 fail because we couldn't properly parse the file.
362 self.pkg.changes_file = filename
364 # Parse the .changes file into a dictionary
366 self.pkg.changes.update(parse_changes(filename))
367 except CantOpenError:
368 self.rejects.append("%s: can't read file." % (filename))
370 except ParseChangesError, line:
371 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
373 except ChangesUnicodeError:
374 self.rejects.append("%s: changes file not proper utf-8" % (filename))
377 # Parse the Files field from the .changes into another dictionary
379 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
380 except ParseChangesError, line:
381 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
383 except UnknownFormatError, format:
384 self.rejects.append("%s: unknown format '%s'." % (filename, format))
387 # Check for mandatory fields
388 for i in ("distribution", "source", "binary", "architecture",
389 "version", "maintainer", "files", "changes", "description"):
390 if not self.pkg.changes.has_key(i):
391 # Avoid undefined errors later
392 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
395 # Strip a source version in brackets from the source field
396 if re_strip_srcver.search(self.pkg.changes["source"]):
397 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
399 # Ensure the source field is a valid package name.
400 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
401 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
403 # Split multi-value fields into a lower-level dictionary
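# e.g. (illustrative) "Architecture: source amd64" becomes
# self.pkg.changes["architecture"] = {"source": 1, "amd64": 1}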
404 for i in ("architecture", "distribution", "binary", "closes"):
405 o = self.pkg.changes.get(i, "")
407 del self.pkg.changes[i]
409 self.pkg.changes[i] = {}
412 self.pkg.changes[i][j] = 1
414 # Fix the Maintainer: field to be RFC822/2047 compatible
416 (self.pkg.changes["maintainer822"],
417 self.pkg.changes["maintainer2047"],
418 self.pkg.changes["maintainername"],
419 self.pkg.changes["maintaineremail"]) = \
420 fix_maintainer (self.pkg.changes["maintainer"])
421 except ParseMaintError, msg:
422 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
423 % (filename, self.pkg.changes["maintainer"], msg))
425 # ...likewise for the Changed-By: field if it exists.
427 (self.pkg.changes["changedby822"],
428 self.pkg.changes["changedby2047"],
429 self.pkg.changes["changedbyname"],
430 self.pkg.changes["changedbyemail"]) = \
431 fix_maintainer (self.pkg.changes.get("changed-by", ""))
432 except ParseMaintError, msg:
433 self.pkg.changes["changedby822"] = ""
434 self.pkg.changes["changedby2047"] = ""
435 self.pkg.changes["changedbyname"] = ""
436 self.pkg.changes["changedbyemail"] = ""
438 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
439 % (filename, self.pkg.changes["changed-by"], msg))
441 # Ensure all the values in Closes: are numbers
442 if self.pkg.changes.has_key("closes"):
443 for i in self.pkg.changes["closes"].keys():
444 if re_isanum.match (i) == None:
445 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
447 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
448 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
449 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
451 # Check the .changes is non-empty
452 if not self.pkg.files:
453 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
456 # Changes was syntactically valid even if we'll reject
459 ###########################################################################
461 def check_distributions(self):
462 "Check and map the Distribution field"
466 # Handle suite mappings
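# Each SuiteMappings entry has the form "<type> <args...>". Illustrative
# examples (not from a real configuration) of the mapping types handled below:
#   map stable proposed-updates
#   map-unreleased stable unstable
#   propup-version stable-security testing testing-proposed-updates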
467 for m in Cnf.ValueList("SuiteMappings"):
470 if mtype == "map" or mtype == "silent-map":
471 (source, dest) = args[1:3]
472 if self.pkg.changes["distribution"].has_key(source):
473 del self.pkg.changes["distribution"][source]
474 self.pkg.changes["distribution"][dest] = 1
475 if mtype != "silent-map":
476 self.notes.append("Mapping %s to %s." % (source, dest))
477 if self.pkg.changes.has_key("distribution-version"):
478 if self.pkg.changes["distribution-version"].has_key(source):
479 self.pkg.changes["distribution-version"][source]=dest
480 elif mtype == "map-unreleased":
481 (source, dest) = args[1:3]
482 if self.pkg.changes["distribution"].has_key(source):
483 for arch in self.pkg.changes["architecture"].keys():
484 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
485 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
486 del self.pkg.changes["distribution"][source]
487 self.pkg.changes["distribution"][dest] = 1
489 elif mtype == "ignore":
491 if self.pkg.changes["distribution"].has_key(suite):
492 del self.pkg.changes["distribution"][suite]
493 self.warnings.append("Ignoring %s as a target suite." % (suite))
494 elif mtype == "reject":
496 if self.pkg.changes["distribution"].has_key(suite):
497 self.rejects.append("Uploads to %s are not accepted." % (suite))
498 elif mtype == "propup-version":
499 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
501 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
502 if self.pkg.changes["distribution"].has_key(args[1]):
503 self.pkg.changes.setdefault("distribution-version", {})
504 for suite in args[2:]:
505 self.pkg.changes["distribution-version"][suite] = suite
507 # Ensure there is (still) a target distribution
508 if len(self.pkg.changes["distribution"].keys()) < 1:
509 self.rejects.append("No valid distribution remaining.")
511 # Ensure target distributions exist
512 for suite in self.pkg.changes["distribution"].keys():
513 if not Cnf.has_key("Suite::%s" % (suite)):
514 self.rejects.append("Unknown distribution `%s'." % (suite))
516 ###########################################################################
518 def binary_file_checks(self, f, session):
520 entry = self.pkg.files[f]
522 # Extract package control information
523 deb_file = utils.open_file(f)
525 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
527 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
529 # Can't continue, none of the checks on control would work.
532 # Check for mandatory "Description:"
535 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
537 self.rejects.append("%s: Missing Description in binary package" % (f))
542 # Check for mandatory fields
543 for field in [ "Package", "Architecture", "Version" ]:
544 if control.Find(field) == None:
546 self.rejects.append("%s: No %s field in control." % (f, field))
549 # Ensure the package name matches the one given in the .changes
550 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
551 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
553 # Validate the package field
554 package = control.Find("Package")
555 if not re_valid_pkg_name.match(package):
556 self.rejects.append("%s: invalid package name '%s'." % (f, package))
558 # Validate the version field
559 version = control.Find("Version")
560 if not re_valid_version.match(version):
561 self.rejects.append("%s: invalid version number '%s'." % (f, version))
563 # Ensure the architecture of the .deb is one we know about.
564 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
565 architecture = control.Find("Architecture")
566 upload_suite = self.pkg.changes["distribution"].keys()[0]
568 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
569 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
570 self.rejects.append("Unknown architecture '%s'." % (architecture))
572 # Ensure the architecture of the .deb is one of the ones
573 # listed in the .changes.
574 if not self.pkg.changes["architecture"].has_key(architecture):
575 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
577 # Sanity-check the Depends field
578 depends = control.Find("Depends")
580 self.rejects.append("%s: Depends field is empty." % (f))
582 # Sanity-check the Provides field
583 provides = control.Find("Provides")
585 provide = re_spacestrip.sub('', provides)
587 self.rejects.append("%s: Provides field is empty." % (f))
588 prov_list = provide.split(",")
589 for prov in prov_list:
590 if not re_valid_pkg_name.match(prov):
591 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
593 # Check the section & priority match those given in the .changes (non-fatal)
594 if control.Find("Section") and entry["section"] != "" \
595 and entry["section"] != control.Find("Section"):
596 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
597 (f, control.Find("Section", ""), entry["section"]))
598 if control.Find("Priority") and entry["priority"] != "" \
599 and entry["priority"] != control.Find("Priority"):
600 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
601 (f, control.Find("Priority", ""), entry["priority"]))
603 entry["package"] = package
604 entry["architecture"] = architecture
605 entry["version"] = version
606 entry["maintainer"] = control.Find("Maintainer", "")
608 if f.endswith(".udeb"):
609 self.pkg.files[f]["dbtype"] = "udeb"
610 elif f.endswith(".deb"):
611 self.pkg.files[f]["dbtype"] = "deb"
613 self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
615 entry["source"] = control.Find("Source", entry["package"])
617 # Get the source version
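# The Source field may carry an explicit version, e.g. (illustrative)
# "Source: foo (1.2-1)"; in that case the name and version are split out
# below, otherwise the source version defaults to the binary's own version.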
618 source = entry["source"]
621 if source.find("(") != -1:
622 m = re_extract_src_version.match(source)
624 source_version = m.group(2)
626 if not source_version:
627 source_version = self.pkg.files[f]["version"]
629 entry["source package"] = source
630 entry["source version"] = source_version
632 # Ensure the filename matches the contents of the .deb
633 m = re_isadeb.match(f)
636 file_package = m.group(1)
637 if entry["package"] != file_package:
638 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
639 (f, file_package, entry["dbtype"], entry["package"]))
640 epochless_version = re_no_epoch.sub('', control.Find("Version"))
643 file_version = m.group(2)
644 if epochless_version != file_version:
645 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
646 (f, file_version, entry["dbtype"], epochless_version))
649 file_architecture = m.group(3)
650 if entry["architecture"] != file_architecture:
651 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
652 (f, file_architecture, entry["dbtype"], entry["architecture"]))
654 # Check for existing source
655 source_version = entry["source version"]
656 source_package = entry["source package"]
657 if self.pkg.changes["architecture"].has_key("source"):
658 if source_version != self.pkg.changes["version"]:
659 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
660 (source_version, f, self.pkg.changes["version"]))
662 # Check in the SQL database
663 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
664 # Check in one of the other directories
665 source_epochless_version = re_no_epoch.sub('', source_version)
666 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
667 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
669 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
672 dsc_file_exists = False
673 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates", "Lenny-Volatile-Proposed-Updates"]:
674 if cnf.has_key("Dir::Queue::%s" % (myq)):
675 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
676 dsc_file_exists = True
679 if not dsc_file_exists:
680 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
682 # Check the version and for file overwrites
683 self.check_binary_against_db(f, session)
685 # Temporarily disable contents generation until we change the table storage layout
688 #if len(b.rejects) > 0:
689 # for j in b.rejects:
690 # self.rejects.append(j)
692 def source_file_checks(self, f, session):
693 entry = self.pkg.files[f]
695 m = re_issource.match(f)
699 entry["package"] = m.group(1)
700 entry["version"] = m.group(2)
701 entry["type"] = m.group(3)
703 # Ensure the source package name matches the Source field in the .changes
704 if self.pkg.changes["source"] != entry["package"]:
705 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
707 # Ensure the source version matches the version in the .changes file
708 if re_is_orig_source.match(f):
709 changes_version = self.pkg.changes["chopversion2"]
711 changes_version = self.pkg.changes["chopversion"]
713 if changes_version != entry["version"]:
714 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
716 # Ensure the .changes lists source in the Architecture field
717 if not self.pkg.changes["architecture"].has_key("source"):
718 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
720 # Check the signature of a .dsc file
721 if entry["type"] == "dsc":
722 # check_signature returns either:
723 # (None, [list, of, rejects]) or (signature, [])
724 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
726 self.rejects.append(j)
728 entry["architecture"] = "source"
730 def per_suite_file_checks(self, f, suite, session):
732 entry = self.pkg.files[f]
735 if entry.has_key("byhand"):
738 # Check we have fields we need to do these checks
740 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
741 if not entry.has_key(m):
742 self.rejects.append("file '%s' does not have field %s set" % (f, m))
748 # Handle component mappings
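# Each ComponentMappings entry is a "<source> <dest>" pair, e.g.
# (illustrative) "non-US/main main"; the original component is remembered
# before being rewritten.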
749 for m in cnf.ValueList("ComponentMappings"):
750 (source, dest) = m.split()
751 if entry["component"] == source:
752 entry["original component"] = source
753 entry["component"] = dest
755 # Ensure the component is valid for the target suite
756 if cnf.has_key("Suite::%s::Components" % (suite)) and \
757 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
758 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
761 # Validate the component
762 if not get_component(entry["component"], session):
763 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
766 # See if the package is NEW
767 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
770 # Validate the priority
771 if entry["priority"].find('/') != -1:
772 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
774 # Determine the location
775 location = cnf["Dir::Pool"]
776 l = get_location(location, entry["component"], session=session)
778 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
779 entry["location id"] = -1
781 entry["location id"] = l.location_id
783 # Check the md5sum & size against existing files (if any)
784 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
786 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
787 entry["size"], entry["md5sum"], entry["location id"])
790 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
791 elif found is False and poolfile is not None:
792 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
795 entry["files id"] = None
797 entry["files id"] = poolfile.file_id
799 # Check for packages that have moved from one component to another
800 entry['suite'] = suite
801 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
803 entry["othercomponents"] = res.fetchone()[0]
805 def check_files(self, action=True):
806 file_keys = self.pkg.files.keys()
812 os.chdir(self.pkg.directory)
814 ret = holding.copy_to_holding(f)
816 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
820 # Check whether we already know the changes file
821 # [NB: this check must be done post-suite mapping]
822 base_filename = os.path.basename(self.pkg.changes_file)
824 session = DBConn().session()
827 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
828 # if in the pool or in a queue other than unchecked, reject
829 if (dbc.in_queue is None) \
830 or (dbc.in_queue is not None
831 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
832 self.rejects.append("%s file already known to dak" % base_filename)
833 except NoResultFound, e:
840 for f, entry in self.pkg.files.items():
841 # Ensure the file does not already exist in one of the accepted directories
842 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
843 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
844 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
845 self.rejects.append("%s file already exists in the %s directory." % (f, d))
847 if not re_taint_free.match(f):
848 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
850 # Check the file is readable
851 if os.access(f, os.R_OK) == 0:
852 # When running in -n, copy_to_holding() won't have
853 # generated the reject_message, so we need to.
855 if os.path.exists(f):
856 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
858 # Don't directly reject, mark to check later to deal with orig's
859 # we can find in the pool
860 self.later_check_files.append(f)
861 entry["type"] = "unreadable"
864 # If it's byhand skip remaining checks
865 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
867 entry["type"] = "byhand"
869 # Checks for a binary package...
870 elif re_isadeb.match(f):
872 entry["type"] = "deb"
874 # This routine appends to self.rejects/warnings as appropriate
875 self.binary_file_checks(f, session)
877 # Checks for a source package...
878 elif re_issource.match(f):
881 # This routine appends to self.rejects/warnings as appropriate
882 self.source_file_checks(f, session)
884 # Not a binary or source package? Assume byhand...
887 entry["type"] = "byhand"
889 # Per-suite file checks
890 entry["oldfiles"] = {}
891 for suite in self.pkg.changes["distribution"].keys():
892 self.per_suite_file_checks(f, suite, session)
896 # If the .changes file says it has source, it must have source.
897 if self.pkg.changes["architecture"].has_key("source"):
899 self.rejects.append("no source found and Architecture line in changes mentions source.")
901 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
902 self.rejects.append("source only uploads are not supported.")
904 ###########################################################################
905 def check_dsc(self, action=True, session=None):
906 """Returns bool indicating whether or not the source changes are valid"""
907 # Ensure there is source to check
908 if not self.pkg.changes["architecture"].has_key("source"):
913 for f, entry in self.pkg.files.items():
914 if entry["type"] == "dsc":
916 self.rejects.append("can not process a .changes file with multiple .dsc's.")
921 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
923 self.rejects.append("source uploads must contain a dsc file")
926 # Parse the .dsc file
928 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1, dsc_file=1))
929 except CantOpenError:
930 # if not -n copy_to_holding() will have done this for us...
932 self.rejects.append("%s: can't read file." % (dsc_filename))
933 except ParseChangesError, line:
934 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
935 except InvalidDscError, line:
936 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
937 except ChangesUnicodeError:
938 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
940 # Build up the list of files mentioned by the .dsc
942 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
943 except NoFilesFieldError:
944 self.rejects.append("%s: no Files: field." % (dsc_filename))
946 except UnknownFormatError, format:
947 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
949 except ParseChangesError, line:
950 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
953 # Enforce mandatory fields
954 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
955 if not self.pkg.dsc.has_key(i):
956 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
959 # Validate the source and version fields
960 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
961 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
962 if not re_valid_version.match(self.pkg.dsc["version"]):
963 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
965 # Only a limited set of source formats is allowed in each suite
966 for dist in self.pkg.changes["distribution"].keys():
967 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
968 if self.pkg.dsc["format"] not in allowed:
969 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
971 # Validate the Maintainer field
973 # We ignore the return value
974 fix_maintainer(self.pkg.dsc["maintainer"])
975 except ParseMaintError, msg:
976 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
977 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
979 # Validate the build-depends field(s)
980 for field_name in [ "build-depends", "build-depends-indep" ]:
981 field = self.pkg.dsc.get(field_name)
983 # Have apt try to parse them...
985 apt_pkg.ParseSrcDepends(field)
987 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
989 # Ensure the version number in the .dsc matches the version number in the .changes
990 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
991 changes_version = self.pkg.files[dsc_filename]["version"]
993 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
994 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
996 # Ensure the Files field contains only what's expected
997 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
999 # Ensure source is newer than existing source in target suites
1000 session = DBConn().session()
1001 self.check_source_against_db(dsc_filename, session)
1002 self.check_dsc_against_db(dsc_filename, session)
1005 # Finally, check if we're missing any files
1006 for f in self.later_check_files:
1007 self.rejects.append("Could not find file %s referenced in changes" % f)
1011 ###########################################################################
1013 def get_changelog_versions(self, source_dir):
1014 """Extracts the source package and (optionally) grabs the
1015 version history out of debian/changelog for the BTS."""
1019 # Find the .dsc (again)
1021 for f in self.pkg.files.keys():
1022 if self.pkg.files[f]["type"] == "dsc":
1025 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1026 if not dsc_filename:
1029 # Create a symlink mirror of the source files in our temporary directory
1030 for f in self.pkg.files.keys():
1031 m = re_issource.match(f)
1033 src = os.path.join(source_dir, f)
1034 # If a file is missing for whatever reason, give up.
1035 if not os.path.exists(src):
1038 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1039 self.pkg.orig_files[f].has_key("path"):
1041 dest = os.path.join(os.getcwd(), f)
1042 os.symlink(src, dest)
1044 # If the orig files are not a part of the upload, create symlinks to the
1046 for orig_file in self.pkg.orig_files.keys():
1047 if not self.pkg.orig_files[orig_file].has_key("path"):
1049 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1050 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1052 # Extract the source
1053 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1054 (result, output) = commands.getstatusoutput(cmd)
1056 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1057 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1060 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1063 # Get the upstream version
1064 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1065 if re_strip_revision.search(upstr_version):
1066 upstr_version = re_strip_revision.sub('', upstr_version)
1068 # Ensure the changelog file exists
1069 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1070 if not os.path.exists(changelog_filename):
1071 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1074 # Parse the changelog
1075 self.pkg.dsc["bts changelog"] = ""
1076 changelog_file = utils.open_file(changelog_filename)
1077 for line in changelog_file.readlines():
1078 m = re_changelog_versions.match(line)
1080 self.pkg.dsc["bts changelog"] += line
1081 changelog_file.close()
1083 # Check we found at least one revision in the changelog
1084 if not self.pkg.dsc["bts changelog"]:
1085 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1087 def check_source(self):
1089 # a) there's no source
1090 if not self.pkg.changes["architecture"].has_key("source"):
1093 tmpdir = utils.temp_dirname()
1095 # Move into the temporary directory
1099 # Get the changelog version history
1100 self.get_changelog_versions(cwd)
1102 # Move back and cleanup the temporary tree
1106 shutil.rmtree(tmpdir)
1108 if e.errno != errno.EACCES:
1110 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1112 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1113 # We probably have u-r or u-w directories so chmod everything
1115 cmd = "chmod -R u+rwx %s" % (tmpdir)
1116 result = os.system(cmd)
1118 utils.fubar("'%s' failed with result %s." % (cmd, result))
1119 shutil.rmtree(tmpdir)
1120 except Exception, e:
1121 print "foobar2 (%s)" % e
1122 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1124 ###########################################################################
1125 def ensure_hashes(self):
1126 # Make sure we recognise the format of the Files: field in the .changes
1127 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1128 if len(format) == 2:
1129 format = int(format[0]), int(format[1])
1131 format = int(float(format[0])), 0
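# e.g. a "Format: 1.8" changes file yields (1, 8) here, while a bare
# "Format: 1" yields (1, 0).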
1133 # We need to deal with the original changes blob, as the fields we need
1134 # might not be in the changes dict serialised into the .dak anymore.
1135 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1137 # Copy the checksums over to the current changes dict. This will keep
1138 # the existing modifications to it intact.
1139 for field in orig_changes:
1140 if field.startswith('checksums-'):
1141 self.pkg.changes[field] = orig_changes[field]
1143 # Check for unsupported hashes
1144 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1145 self.rejects.append(j)
1147 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1148 self.rejects.append(j)
1150 # We have to calculate a hash ourselves if the changes file format predates the
1151 # version in which that hash appears, rather than requiring it to be present in the changes file
1152 for hashname, hashfunc, version in utils.known_hashes:
1153 # TODO: Move _ensure_changes_hash into this class
1154 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1155 self.rejects.append(j)
1156 if "source" in self.pkg.changes["architecture"]:
1157 # TODO: Move _ensure_dsc_hash into this class
1158 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1159 self.rejects.append(j)
1161 def check_hashes(self):
1162 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1163 self.rejects.append(m)
1165 for m in utils.check_size(".changes", self.pkg.files):
1166 self.rejects.append(m)
1168 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1169 self.rejects.append(m)
1171 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1172 self.rejects.append(m)
1174 self.ensure_hashes()
1176 ###########################################################################
1178 def ensure_orig(self, target_dir='.', session=None):
1180 Ensures that all orig files mentioned in the changes file are present
1181 in target_dir. If they do not exist, they are symlinked into place.
1183 A list containing the symlinks that were created is returned (so they
1190 for filename, entry in self.pkg.dsc_files.iteritems():
1191 if not re_is_orig_source.match(filename):
1192 # File is not an orig; ignore
1195 if os.path.exists(filename):
1196 # File exists, no need to continue
1199 def symlink_if_valid(path):
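# Compare the candidate file's size and md5sum against what the .dsc
# expects; only if both match is it symlinked into target_dir and
# recorded in the symlinked list.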
1200 f = utils.open_file(path)
1201 md5sum = apt_pkg.md5sum(f)
1204 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1205 expected = (int(entry['size']), entry['md5sum'])
1207 if fingerprint != expected:
1210 dest = os.path.join(target_dir, filename)
1212 os.symlink(path, dest)
1213 symlinked.append(dest)
1219 session_ = DBConn().session()
1224 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1225 poolfile_path = os.path.join(
1226 poolfile.location.path, poolfile.filename
1229 if symlink_if_valid(poolfile_path):
1239 # Look in some other queues for the file
1240 queues = ('New', 'Byhand', 'ProposedUpdates',
1241 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1243 for queue in queues:
1244 if not cnf.get('Dir::Queue::%s' % queue):
1247 queuefile_path = os.path.join(
1248 cnf['Dir::Queue::%s' % queue], filename
1251 if not os.path.exists(queuefile_path):
1252 # Does not exist in this queue
1255 if symlink_if_valid(queuefile_path):
1260 ###########################################################################
1262 def check_lintian(self):
1264 Extends self.rejects by checking the output of lintian against tags
1265 specified in Dinstall::LintianTags.
1270 # Don't reject binary uploads
1271 if not self.pkg.changes['architecture'].has_key('source'):
1274 # Only check some distributions
1275 for dist in ('unstable', 'experimental'):
1276 if dist in self.pkg.changes['distribution']:
1281 # If we do not have a tagfile, don't do anything
1282 tagfile = cnf.get("Dinstall::LintianTags")
1286 # Parse the yaml file
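# The tag file is expected to be YAML with a top-level "lintian" key
# mapping tag groups to lists of tag names, roughly (group names
# illustrative):
#   lintian:
#     nonfatal:
#       - some-tag
#     fatal:
#       - some-other-tag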
1287 sourcefile = file(tagfile, 'r')
1288 sourcecontent = sourcefile.read()
1292 lintiantags = yaml.load(sourcecontent)['lintian']
1293 except yaml.YAMLError, msg:
1294 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1297 # Try and find all orig files mentioned in the .dsc
1298 symlinked = self.ensure_orig()
1300 # Set up the input file for lintian
1301 fd, temp_filename = utils.temp_filename()
1302 temptagfile = os.fdopen(fd, 'w')
1303 for tags in lintiantags.values():
1304 temptagfile.writelines(['%s\n' % x for x in tags])
1308 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1309 (temp_filename, self.pkg.changes_file)
1311 result, output = commands.getstatusoutput(cmd)
1313 # Remove our tempfile and any symlinks we created
1314 os.unlink(temp_filename)
1316 for symlink in symlinked:
1320 utils.warn("lintian failed for %s [return code: %s]." % \
1321 (self.pkg.changes_file, result))
1322 utils.warn(utils.prefix_multi_line_string(output, \
1323 " [possible output:] "))
1328 [self.pkg.changes_file, "check_lintian"] + list(txt)
1332 parsed_tags = parse_lintian_output(output)
1333 self.rejects.extend(
1334 generate_reject_messages(parsed_tags, lintiantags, log=log)
1337 ###########################################################################
1338 def check_urgency(self):
1340 if self.pkg.changes["architecture"].has_key("source"):
1341 if not self.pkg.changes.has_key("urgency"):
1342 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1343 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1344 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1345 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1346 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1347 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1349 ###########################################################################
1351 # Sanity check the time stamps of files inside debs.
1352 # [Files in the near future cause ugly warnings and extreme time
1353 # travel can cause errors on extraction]
1355 def check_timestamps(self):
1358 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1359 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1360 tar = TarTime(future_cutoff, past_cutoff)
1362 for filename, entry in self.pkg.files.items():
1363 if entry["type"] == "deb":
1366 deb_file = utils.open_file(filename)
1367 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1370 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1371 except SystemError, e:
1372 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1373 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1376 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1380 future_files = tar.future_files.keys()
1382 num_future_files = len(future_files)
1383 future_file = future_files[0]
1384 future_date = tar.future_files[future_file]
1385 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1386 % (filename, num_future_files, future_file, time.ctime(future_date)))
1388 ancient_files = tar.ancient_files.keys()
1390 num_ancient_files = len(ancient_files)
1391 ancient_file = ancient_files[0]
1392 ancient_date = tar.ancient_files[ancient_file]
1393 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1394 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1396 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1398 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1399 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1401 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1407 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1408 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1409 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1410 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1411 self.pkg.changes["sponsoremail"] = uid_email
1416 ###########################################################################
1417 # check_signed_by_key checks
1418 ###########################################################################
1420 def check_signed_by_key(self):
1421 """Ensure the .changes is signed by an authorized uploader."""
1422 session = DBConn().session()
1424 # First of all we check that the person has proper upload permissions
1425 # and that this upload isn't blocked
1426 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1429 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1432 # TODO: Check that import-keyring adds UIDs properly
1434 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1437 # Check that the fingerprint which uploaded has permission to do so
1438 self.check_upload_permissions(fpr, session)
1440 # Check that this package is not in a transition
1441 self.check_transition(session)
1446 def check_upload_permissions(self, fpr, session):
1447 # Check any one-off upload blocks
1448 self.check_upload_blocks(fpr, session)
1450 # Start with DM as a special case
1451 # DM is a special case unfortunately, so we check it first
1452 # (keys with no source access get more access than DMs in one
1453 # way; DMs can only upload for their packages whether source
1454 # or binary, whereas keys with no access might be able to
1455 # upload some binaries)
1456 if fpr.source_acl.access_level == 'dm':
1457 self.check_dm_upload(fpr, session)
1459 # Check source-based permissions for other types
1460 if self.pkg.changes["architecture"].has_key("source") and \
1461 fpr.source_acl.access_level is None:
1462 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1463 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1464 self.rejects.append(rej)
1466 # If not a DM, we allow full upload rights
1467 uid_email = "%s@debian.org" % (fpr.uid.uid)
1468 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1471 # Check binary upload permissions
1472 # By this point we know that DMs can't have got here unless they
1473 # are allowed to deal with the package concerned so just apply
1475 if fpr.binary_acl.access_level == 'full':
1478 # Otherwise we're in the map case
1479 tmparches = self.pkg.changes["architecture"].copy()
1480 tmparches.pop('source', None)
1482 for bam in fpr.binary_acl_map:
1483 tmparches.pop(bam.architecture.arch_string, None)
1485 if len(tmparches.keys()) > 0:
1486 if fpr.binary_reject:
1487 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1488 rej += "\narchitectures involved are: " + ",".join(tmparches.keys())
1489 self.rejects.append(rej)
1491 # TODO: This is where we'll implement reject vs throw away binaries later
1492 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1493 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1494 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1495 self.rejects.append(rej)
1498 def check_upload_blocks(self, fpr, session):
1499 """Check whether any upload blocks apply to this source, source
1500 version, uid / fpr combination"""
1502 def block_rej_template(fb):
1503 rej = 'Manual upload block in place for package %s' % fb.source
1504 if fb.version is not None:
1505 rej += ', version %s' % fb.version
1508 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1509 # version is None if the block applies to all versions
1510 if fb.version is None or fb.version == self.pkg.changes['version']:
1511 # Check both fpr and uid - either is enough to cause a reject
1512 if fb.fpr is not None:
1513 if fb.fpr.fingerprint == fpr.fingerprint:
1514 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1515 if fb.uid is not None:
1516 if fb.uid == fpr.uid:
1517 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1520 def check_dm_upload(self, fpr, session):
1521 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1522 ## none of the uploaded packages are NEW
1524 for f in self.pkg.files.keys():
1525 if self.pkg.files[f].has_key("byhand"):
1526 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1528 if self.pkg.files[f].has_key("new"):
1529 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1535 ## the most recent version of the package uploaded to unstable or
1536 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1537 ## section of its control file
1538 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1539 q = q.join(SrcAssociation)
1540 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1541 q = q.order_by(desc('source.version')).limit(1)
1546 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1547 self.rejects.append(rej)
1551 if not r.dm_upload_allowed:
1552 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1553 self.rejects.append(rej)
1556 ## the Maintainer: field of the uploaded .changes file corresponds with
1557 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1559 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1560 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1562 ## the most recent version of the package uploaded to unstable or
1563 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1564 ## non-developer maintainers cannot NMU or hijack packages)
1566 # srcuploaders includes the maintainer
1568 for sup in r.srcuploaders:
1569 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1570 # Eww - I hope we never have two people with the same name in Debian
1571 if email == fpr.uid.uid or name == fpr.uid.name:
1576 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1579 ## none of the packages are being taken over from other source packages
1580 for b in self.pkg.changes["binary"].keys():
1581 for suite in self.pkg.changes["distribution"].keys():
1582 q = session.query(DBSource)
1583 q = q.join(DBBinary).filter_by(package=b)
1584 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1587 if s.source != self.pkg.changes["source"]:
1588 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1592 def check_transition(self, session):
1595 sourcepkg = self.pkg.changes["source"]
1597 # No sourceful upload -> no need to do anything else, direct return
1598 # We only deal with unstable uploads here, not experimental or those going to some
1599 # proposed-updates queue
1600 if "source" not in self.pkg.changes["architecture"] or \
1601 "unstable" not in self.pkg.changes["distribution"]:
1604 # Also only check if there is a file defined (and existant) with
1606 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1607 if transpath == "" or not os.path.exists(transpath):
1610 # Parse the yaml file
1611 sourcefile = file(transpath, 'r')
1612 sourcecontent = sourcefile.read()
1614 transitions = yaml.load(sourcecontent)
1615 except yaml.YAMLError, msg:
1616 # This shouldn't happen, there is a wrapper to edit the file which
1617 # checks it, but we would rather be safe than end up rejecting
1619 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1622 # Now look through all defined transitions
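# Illustrative sketch of a transition entry; the source, reason, rm and
# packages keys are read below, the expected-version key name is an
# assumption:
#   mytransition:
#     reason: "Library transition"
#     source: somesourcepackage
#     new: 1.2-3
#     rm: "Some Release Manager"
#     packages:
#       - somesourcepackage
#       - somereversedep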
1623 for trans in transitions:
1624 t = transitions[trans]
1625 source = t["source"]
1628 # Will be None if nothing is in testing.
1629 current = get_source_in_suite(source, "testing", session)
1630 if current is not None:
1631 compare = apt_pkg.VersionCompare(current.version, expected)
1633 if current is None or compare < 0:
1634 # This is still valid, the current version in testing is older than
1635 # the new version we wait for, or there is none in testing yet
1637 # Check if the source we look at is affected by this.
1638 if sourcepkg in t['packages']:
1639 # The source is affected, lets reject it.
1641 rejectmsg = "%s: part of the %s transition.\n\n" % (
1644 if current is not None:
1645 currentlymsg = "at version %s" % (current.version)
1647 currentlymsg = "not present in testing"
1649 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1651 rejectmsg += "\n".join(textwrap.wrap("""Your package
1652 is part of a testing transition designed to get %s migrated (it is
1653 currently %s, we need version %s). This transition is managed by the
1654 Release Team, and %s is the Release-Team member responsible for it.
1655 Please mail debian-release@lists.debian.org or contact %s directly if you
1656 need further assistance. You might want to upload to experimental until this
1657 transition is done."""
1658 % (source, currentlymsg, expected,t["rm"], t["rm"])))
1660 self.rejects.append(rejectmsg)
1663 ###########################################################################
1664 # End check_signed_by_key checks
1665 ###########################################################################
1667 def build_summaries(self):
1668 """ Build a summary of changes the upload introduces. """
1670 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1672 short_summary = summary
1674 # This is for direport's benefit...
1675 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1678 summary += "Changes: " + f
1680 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1682 summary += self.announce(short_summary, 0)
1684 return (summary, short_summary)
1686 ###########################################################################
1688 def close_bugs(self, summary, action):
1690 Send mail to close bugs as instructed by the closes field in the changes file.
1691 Also add a line to summary if any work was done.
1693 @type summary: string
1694 @param summary: summary text, as given by L{build_summaries}
1697 @param action: If set to false, no real action will be done.
1700 @return: summary. If action was taken, extended by the list of closed bugs.
1704 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1706 bugs = self.pkg.changes["closes"].keys()
1712 summary += "Closing bugs: "
1714 summary += "%s " % (bug)
1717 self.Subst["__BUG_NUMBER__"] = bug
1718 if self.pkg.changes["distribution"].has_key("stable"):
1719 self.Subst["__STABLE_WARNING__"] = """
1720 Note that this package is not part of the released stable Debian
1721 distribution. It may have dependencies on other unreleased software,
1722 or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
else:
    self.Subst["__STABLE_WARNING__"] = ""
1727 mail_message = utils.TemplateSubst(self.Subst, template)
1728 utils.send_mail(mail_message)
1730 # Clear up after ourselves
1731 del self.Subst["__BUG_NUMBER__"]
1732 del self.Subst["__STABLE_WARNING__"]
1734 if action and self.logger:
1735 self.logger.log(["closing bugs"] + bugs)
1741 ###########################################################################
1743 def announce(self, short_summary, action):
"""
Send an announce mail about a new upload.
1747 @type short_summary: string
1748 @param short_summary: Short summary text to include in the mail
@type action: bool
@param action: If set to false, no real action will be taken.
@rtype: string
@return: Text string describing the action taken.
"""
cnf = Config()
announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
# Only do announcements for source uploads with a recent dpkg-dev installed
if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
   self.pkg.changes["architecture"].has_key("source"):
    return ""

lists_done = {}
summary = ""
1769 self.Subst["__SHORT_SUMMARY__"] = short_summary
1771 for dist in self.pkg.changes["distribution"].keys():
1772 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1773 if announce_list == "" or lists_done.has_key(announce_list):
1776 lists_done[announce_list] = 1
1777 summary += "Announcing to %s\n" % (announce_list)
1781 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1782 if cnf.get("Dinstall::TrackingServer") and \
1783 self.pkg.changes["architecture"].has_key("source"):
1784 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1785 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1787 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1788 utils.send_mail(mail_message)
1790 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1792 if cnf.FindB("Dinstall::CloseBugs"):
1793 summary = self.close_bugs(summary, action)
1795 del self.Subst["__SHORT_SUMMARY__"]
1799 ###########################################################################
1801 def accept (self, summary, short_summary, session=None):
"""
Accept an upload.

This moves all files referenced from the .changes into the pool,
1806 sends the accepted mail, announces to lists, closes bugs and
1807 also checks for override disparities. If enabled it will write out
the version history for the BTS Version Tracking and will finally call
L{queue_build}.
1811 @type summary: string
1812 @param summary: Summary text
1814 @type short_summary: string
@param short_summary: Short summary
"""
cnf = Config()
stats = SummaryStats()
1822 self.logger.log(["installing changes", self.pkg.changes_file])
poolfiles = []

# Add the .dsc file to the DB first
1827 for newfile, entry in self.pkg.files.items():
1828 if entry["type"] == "dsc":
source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
for j in pfs:
    poolfiles.append(j)
1833 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1834 for newfile, entry in self.pkg.files.items():
1835 if entry["type"] == "deb":
1836 poolfiles.append(add_deb_to_db(self, newfile, session))
# If this is a sourceful, diff-only upload that is moving
1839 # cross-component we need to copy the .orig files into the new
1840 # component too for the same reasons as above.
1841 # XXX: mhy: I think this should be in add_dsc_to_db
1842 if self.pkg.changes["architecture"].has_key("source"):
1843 for orig_file in self.pkg.orig_files.keys():
1844 if not self.pkg.orig_files[orig_file].has_key("id"):
1845 continue # Skip if it's not in the pool
1846 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1847 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1848 continue # Skip if the location didn't change
1851 oldf = get_poolfile_by_id(orig_file_id, session)
1852 old_filename = os.path.join(oldf.location.path, oldf.filename)
1853 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1854 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1856 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1858 # TODO: Care about size/md5sum collisions etc
1859 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1861 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
if newf is None:
    utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
    newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1868 # Don't reference the old file from this changes
for p in poolfiles:
    if p.file_id == oldf.file_id:
        poolfiles.remove(p)
1873 poolfiles.append(newf)
# Fix up the DSC references
toremove = []
for df in source.srcfiles:
1879 if df.poolfile.file_id == oldf.file_id:
1880 # Add a new DSC entry and mark the old one for deletion
1881 # Don't do it in the loop so we don't change the thing we're iterating over
newdscf = DSCFile()
newdscf.source_id = source.source_id
1884 newdscf.poolfile_id = newf.file_id
session.add(newdscf)
toremove.append(df)

for df in toremove:
    session.delete(df)
1895 # Make sure that our source object is up-to-date
1896 session.expire(source)
1898 # Add changelog information to the database
1899 self.store_changelog()
1901 # Install the files into the pool
1902 for newfile, entry in self.pkg.files.items():
1903 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1904 utils.move(newfile, destination)
1905 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1906 stats.accept_bytes += float(entry["size"])
# Copy the .changes file across for suites which need it.
copy_changes = {}
for suite_name in self.pkg.changes["distribution"].keys():
1911 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1912 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1914 for dest in copy_changes.keys():
1915 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
# We're done - commit the database changes
session.commit()
# Our SQL session will automatically start a new transaction after
# the last commit.
1922 # Move the .changes into the 'done' directory
1923 utils.move(self.pkg.changes_file,
1924 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1926 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1927 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1930 self.Subst["__SUITE__"] = ""
1931 self.Subst["__SUMMARY__"] = summary
1932 mail_message = utils.TemplateSubst(self.Subst,
1933 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1934 utils.send_mail(mail_message)
1935 self.announce(short_summary, 1)
1937 ## Helper stuff for DebBugs Version Tracking
1938 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1939 if self.pkg.changes["architecture"].has_key("source"):
1940 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1941 version_history = os.fdopen(fd, 'w')
1942 version_history.write(self.pkg.dsc["bts changelog"])
1943 version_history.close()
1944 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1945 self.pkg.changes_file[:-8]+".versions")
1946 os.rename(temp_filename, filename)
1947 os.chmod(filename, 0644)
1949 # Write out the binary -> source mapping.
1950 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1951 debinfo = os.fdopen(fd, 'w')
1952 for name, entry in sorted(self.pkg.files.items()):
1953 if entry["type"] == "deb":
1954 line = " ".join([entry["package"], entry["version"],
1955 entry["architecture"], entry["source package"],
1956 entry["source version"]])
1957 debinfo.write(line+"\n")
1959 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1960 self.pkg.changes_file[:-8]+".debinfo")
1961 os.rename(temp_filename, filename)
1962 os.chmod(filename, 0644)
1966 # Set up our copy queues (e.g. buildd queues)
1967 for suite_name in self.pkg.changes["distribution"].keys():
1968 suite = get_suite(suite_name, session)
1969 for q in suite.copy_queues:
for f in poolfiles:
    q.add_file_from_pool(f)
session.commit()

stats.accept_count += 1
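# Typical accept flow, as a sketch (assuming 'u' is an Upload that has
# already passed its checks):
#   (summary, short_summary) = u.build_summaries()
#   u.accept(summary, short_summary, session)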
1978 def check_override(self):
"""
Checks override entries for validity. Mails "Override disparity" warnings,
1981 if that feature is enabled.
1983 Abandons the check if
1984 - override disparity checks are disabled
- mail sending is disabled
"""
cnf = Config()

# Abandon the check if override disparity checks have been disabled
if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
    return
summary = self.pkg.check_override()
if summary == "":
    return
1999 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2002 self.Subst["__SUMMARY__"] = summary
2003 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2004 utils.send_mail(mail_message)
2005 del self.Subst["__SUMMARY__"]
2007 ###########################################################################
2009 def remove(self, from_dir=None):
"""
Used (for instance) in p-u to remove the package from unchecked.

Also removes the package from the holding area.
"""
2015 if from_dir is None:
2016 from_dir = self.pkg.directory
h = Holding()

for f in self.pkg.files.keys():
2020 os.unlink(os.path.join(from_dir, f))
2021 if os.path.exists(os.path.join(h.holding_dir, f)):
2022 os.unlink(os.path.join(h.holding_dir, f))
2024 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2025 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2026 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2028 ###########################################################################
2030 def move_to_queue (self, queue):
"""
Move files to a destination queue using the permissions in the table.
"""
h = Holding()
utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2036 queue.path, perms=int(queue.change_perms, 8))
2037 for f in self.pkg.files.keys():
2038 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
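# queue.perms / queue.change_perms come out of the database as octal
# strings, hence the int(x, 8) conversion above; e.g. int("0644", 8) == 420.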
2040 ###########################################################################
2042 def force_reject(self, reject_files):
"""
Forcefully move files from the current directory to the
2045 reject directory. If any file already exists in the reject
directory it will be moved to the morgue to make way for
the new file.
2049 @type reject_files: dict
@param reject_files: file dictionary
"""

cnf = Config()
2056 for file_entry in reject_files:
2057 # Skip any files which don't exist or which we don't have permission to copy.
if os.access(file_entry, os.R_OK) == 0:
    continue
2061 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
try:
    dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
except OSError, e:
    # File exists?  Let's find a new name by adding a number
    if e.errno == errno.EEXIST:
try:
    dest_file = utils.find_next_free(dest_file, 255)
2070 except NoFreeFilenameError:
2071 # Something's either gone badly Pete Tong, or
2072 # someone is trying to exploit us.
2073 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2076 # Make sure we really got it
try:
    dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
except OSError:
    utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
    return
2085 # If we got here, we own the destination file, so we can
2086 # safely overwrite it.
utils.move(file_entry, dest_file, 1, perms=0660)
os.close(dest_fd)
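# Design note: os.O_CREAT | os.O_EXCL makes the open fail with EEXIST
# if the file is already there, so two processes can never both claim
# the same reject filename; the loser lands in the handling above.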
2090 ###########################################################################
2091 def do_reject (self, manual=0, reject_message="", notes=""):
"""
Reject an upload. If called without a reject message or C{manual} is
2094 true, spawn an editor so the user can write one.
@type manual: bool
@param manual: manual or automated rejection
2099 @type reject_message: string
@param reject_message: A reject message
"""
2105 # If we weren't given a manual rejection message, spawn an
2106 # editor so the user can add one in...
2107 if manual and not reject_message:
2108 (fd, temp_filename) = utils.temp_filename()
2109 temp_file = os.fdopen(fd, 'w')
2112 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2113 % (note.author, note.version, note.notedate, note.comment))
temp_file.close()
editor = os.environ.get("EDITOR","vi")
answer = 'E'
2117 while answer == 'E':
2118 os.system("%s %s" % (editor, temp_filename))
2119 temp_fh = utils.open_file(temp_filename)
2120 reject_message = "".join(temp_fh.readlines())
2122 print "Reject message:"
2123 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2124 prompt = "[R]eject, Edit, Abandon, Quit ?"
2126 while prompt.find(answer) == -1:
2127 answer = utils.our_raw_input(prompt)
2128 m = re_default_answer.search(prompt)
2131 answer = answer[:1].upper()
2132 os.unlink(temp_filename)
2138 print "Rejecting.\n"
cnf = Config()

reason_filename = self.pkg.changes_file[:-8] + ".reason"
2143 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2145 # Move all the files into the reject directory
2146 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2147 self.force_reject(reject_files)
2149 # If we fail here someone is probably trying to exploit the race
2150 # so let's just raise an exception ...
2151 if os.path.exists(reason_filename):
2152 os.unlink(reason_filename)
2153 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2155 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2159 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2160 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2161 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2162 os.write(reason_fd, reject_message)
2163 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
else:
    # Build up the rejection email
2166 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2167 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2168 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2169 self.Subst["__REJECT_MESSAGE__"] = ""
2170 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2171 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2172 # Write the rejection email out as the <foo>.reason file
2173 os.write(reason_fd, reject_mail_message)
2175 del self.Subst["__REJECTOR_ADDRESS__"]
2176 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2177 del self.Subst["__CC__"]
os.close(reason_fd)

# Send the rejection mail
2182 utils.send_mail(reject_mail_message)
2185 self.logger.log(["rejected", self.pkg.changes_file])
2189 ################################################################################
2190 def in_override_p(self, package, component, suite, binary_type, filename, session):
"""
Check if a package already has override entries in the DB.
2194 @type package: string
2195 @param package: package name
2197 @type component: string
@param component: component name
@param suite: suite name
2203 @type binary_type: string
2204 @param binary_type: type of the package
2206 @type filename: string
2207 @param filename: filename we check
@return: the database result. But no one cares anyway.
"""
2215 if binary_type == "": # must be source
2218 file_type = binary_type
2220 # Override suite name; used for example with proposed-updates
2221 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2222 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2224 result = get_override(package, suite, component, file_type, session)
2226 # If checking for a source package fall back on the binary override type
2227 if file_type == "dsc" and len(result) < 1:
2228 result = get_override(package, suite, component, ['deb', 'udeb'], session)
# Remember the section and priority so we can check them later if appropriate
if len(result) > 0:
    result = result[0]
    self.pkg.files[filename]["override section"] = result.section.section
2234 self.pkg.files[filename]["override priority"] = result.priority.priority
2239 ################################################################################
2240 def get_anyversion(self, sv_list, suite):
"""
@type sv_list: list
@param sv_list: list of (suite, version) tuples to check
@type suite: string
@param suite: suite name

@return: the highest version found in C{suite} or any suite it enhances
"""
Cnf = Config()
anyversion = None
anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2253 for (s, v) in sv_list:
2254 if s in [ x.lower() for x in anysuite ]:
if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
    anyversion = v

return anyversion
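# Worked example (invented data): if "unstable" enhances nothing and
# sv_list = [("unstable", "1.0-1"), ("experimental", "2.0-1")], only the
# "unstable" entry is considered and "1.0-1" is returned.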
2260 ################################################################################
2262 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
"""
@type sv_list: list
@param sv_list: list of (suite, version) tuples to check
2267 @type filename: string
@param filename: name of the file whose version is being checked
2270 @type new_version: string
@param new_version: version of the file being uploaded
2273 Ensure versions are newer than existing packages in target
2274 suites and that cross-suite version checking rules as
set out in the conf file are satisfied.
"""
cnf = Config()
2280 # Check versions for each target suite
2281 for target_suite in self.pkg.changes["distribution"].keys():
2282 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2283 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2285 # Enforce "must be newer than target suite" even if conffile omits it
2286 if target_suite not in must_be_newer_than:
2287 must_be_newer_than.append(target_suite)
2289 for (suite, existent_version) in sv_list:
2290 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2292 if suite in must_be_newer_than and sourceful and vercmp < 1:
2293 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
if suite in must_be_older_than and vercmp > -1:
    cansave = 0
2298 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2299 # we really use the other suite, ignoring the conflicting one ...
2300 addsuite = self.pkg.changes["distribution-version"][suite]
2302 add_version = self.get_anyversion(sv_list, addsuite)
2303 target_version = self.get_anyversion(sv_list, target_suite)
if not add_version:
    # not add_version can only happen if we map to a suite
2307 # that doesn't enhance the suite we're propup'ing from.
2308 # so "propup-ver x a b c; map a d" is a problem only if
2309 # d doesn't enhance a.
2311 # i think we could always propagate in this case, rather
2312 # than complaining. either way, this isn't a REJECT issue
2314 # And - we really should complain to the dorks who configured dak
2315 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2316 self.pkg.changes.setdefault("propdistribution", {})
2317 self.pkg.changes["propdistribution"][addsuite] = 1
2319 elif not target_version:
2320 # not targets_version is true when the package is NEW
2321 # we could just stick with the "...old version..." REJECT
2322 # for this, I think.
2323 self.rejects.append("Won't propogate NEW packages.")
elif apt_pkg.VersionCompare(new_version, add_version) < 0:
    # propagation would be redundant. no need to reject though.
    self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
    cansave = 1
2328 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2329 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2331 self.warnings.append("Propogating upload to %s" % (addsuite))
2332 self.pkg.changes.setdefault("propdistribution", {})
2333 self.pkg.changes["propdistribution"][addsuite] = 1
2337 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2339 ################################################################################
2340 def check_binary_against_db(self, filename, session):
2341 # Ensure version is sane
2342 q = session.query(BinAssociation)
2343 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2344 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2346 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2347 filename, self.pkg.files[filename]["version"], sourceful=False)
2349 # Check for any existing copies of the file
2350 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2351 q = q.filter_by(version=self.pkg.files[filename]["version"])
2352 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2355 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2357 ################################################################################
2359 def check_source_against_db(self, filename, session):
2360 source = self.pkg.dsc.get("source")
2361 version = self.pkg.dsc.get("version")
2363 # Ensure version is sane
2364 q = session.query(SrcAssociation)
2365 q = q.join(DBSource).filter(DBSource.source==source)
2367 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2368 filename, version, sourceful=True)
2370 ################################################################################
2371 def check_dsc_against_db(self, filename, session):
"""
@warning: NB: this function can remove entries from the 'files' index [if
2375 the orig tarball is a duplicate of the one in the archive]; if
2376 you're iterating over 'files' and call this function as part of
2377 the loop, be sure to add a check to the top of the loop to
ensure you haven't just tried to dereference the deleted entry.
"""

Cnf = Config()
2383 self.pkg.orig_files = {} # XXX: do we need to clear it?
2384 orig_files = self.pkg.orig_files
2386 # Try and find all files mentioned in the .dsc. This has
2387 # to work harder to cope with the multiple possible
2388 # locations of an .orig.tar.gz.
2389 # The ordering on the select is needed to pick the newest orig
2390 # when it exists in multiple places.
for dsc_name, dsc_entry in self.pkg.dsc_files.items():
    found = None
2393 if self.pkg.files.has_key(dsc_name):
2394 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2395 actual_size = int(self.pkg.files[dsc_name]["size"])
2396 found = "%s in incoming" % (dsc_name)
2398 # Check the file does not already exist in the archive
2399 ql = get_poolfile_like_name(dsc_name, session)
# Strip out anything that isn't '%s' or '/%s$'
for i in ql[:]:
    if not i.filename.endswith(dsc_name):
        ql.remove(i)
2406 # "[dak] has not broken them. [dak] has fixed a
# brokenness. Your crappy hack exploited a bug in
# the old dak.
2410 # "(Come on! I thought it was always obvious that
2411 # one just doesn't release different files with
2412 # the same name and version.)"
2413 # -- ajk@ on d-devel@l.d.o
# Ignore exact matches for .orig.tar.gz
match = 0
if re_is_orig_source.match(dsc_name):
    for i in ql:
2420 if self.pkg.files.has_key(dsc_name) and \
2421 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2422 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2423 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2424 # TODO: Don't delete the entry, just mark it as not needed
2425 # This would fix the stupidity of changing something we often iterate over
2426 # whilst we're doing it
2427 del self.pkg.files[dsc_name]
2428 dsc_entry["files id"] = i.file_id
2429 if not orig_files.has_key(dsc_name):
2430 orig_files[dsc_name] = {}
2431 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2434 # Don't bitch that we couldn't find this file later
if dsc_name in self.later_check_files:
    self.later_check_files.remove(dsc_name)
match = 1
2442 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2444 elif re_is_orig_source.match(dsc_name):
2446 ql = get_poolfile_like_name(dsc_name, session)
2448 # Strip out anything that isn't '%s' or '/%s$'
2449 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
for i in ql[:]:
    if not i.filename.endswith(dsc_name):
        ql.remove(i)
2455 # Unfortunately, we may get more than one match here if,
2456 # for example, the package was in potato but had an -sa
2457 # upload in woody. So we need to choose the right one.
# default to something sane in case we don't match any or have only one
x = ql[0]

if len(ql) > 1:
    for i in ql:
        old_file = os.path.join(i.location.path, i.filename)
2465 old_file_fh = utils.open_file(old_file)
actual_md5 = apt_pkg.md5sum(old_file_fh)
old_file_fh.close()
actual_size = os.stat(old_file)[stat.ST_SIZE]
if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
    x = i

old_file = os.path.join(x.location.path, x.filename)
old_file_fh = utils.open_file(old_file)
actual_md5 = apt_pkg.md5sum(old_file_fh)
old_file_fh.close()
actual_size = os.stat(old_file)[stat.ST_SIZE]
found = old_file
suite_type = x.location.archive_type
2479 # need this for updating dsc_files in install()
2480 dsc_entry["files id"] = x.file_id
2481 # See install() in process-accepted...
2482 if not orig_files.has_key(dsc_name):
2483 orig_files[dsc_name] = {}
2484 orig_files[dsc_name]["id"] = x.file_id
2485 orig_files[dsc_name]["path"] = old_file
2486 orig_files[dsc_name]["location"] = x.location.location_id
2488 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2489 # Not there? Check the queue directories...
2490 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
if not Cnf.has_key("Dir::Queue::%s" % (directory)):
    continue
2493 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2494 if os.path.exists(in_otherdir):
2495 in_otherdir_fh = utils.open_file(in_otherdir)
2496 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2497 in_otherdir_fh.close()
actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
found = in_otherdir
2500 if not orig_files.has_key(dsc_name):
2501 orig_files[dsc_name] = {}
2502 orig_files[dsc_name]["path"] = in_otherdir
2505 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2508 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2510 if actual_md5 != dsc_entry["md5sum"]:
2511 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2512 if actual_size != int(dsc_entry["size"]):
2513 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2515 ################################################################################
2516 # This is used by process-new and process-holding to recheck a changes file
2517 # at the time we're running. It mainly wraps various other internal functions
2518 # and is similar to accepted_checks - these should probably be tidied up
2520 def recheck(self, session):
cnf = Config()
for f in self.pkg.files.keys():
    # The .orig.tar.gz can disappear out from under us if it's a
    # duplicate of one in the archive.
    if not self.pkg.files.has_key(f):
        continue
2528 entry = self.pkg.files[f]
2530 # Check that the source still exists
2531 if entry["type"] == "deb":
2532 source_version = entry["source version"]
2533 source_package = entry["source package"]
2534 if not self.pkg.changes["architecture"].has_key("source") \
2535 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2536 source_epochless_version = re_no_epoch.sub('', source_version)
2537 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2539 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2540 if cnf.has_key("Dir::Queue::%s" % (q)):
if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
    found = True
if not found:
    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2546 # Version and file overwrite checks
2547 if entry["type"] == "deb":
2548 self.check_binary_against_db(f, session)
2549 elif entry["type"] == "dsc":
2550 self.check_source_against_db(f, session)
2551 self.check_dsc_against_db(f, session)
2553 ################################################################################
2554 def accepted_checks(self, overwrite_checks, session):
2555 # Recheck anything that relies on the database; since that's not
2556 # frozen between accept and our run time when called from p-a.
# overwrite_checks is set to False when installing to stable/oldstable

propogate = {}
nopropogate = {}
# Find the .dsc (again)
dsc_filename = None
for f in self.pkg.files.keys():
    if self.pkg.files[f]["type"] == "dsc":
        dsc_filename = f
2569 for checkfile in self.pkg.files.keys():
# The .orig.tar.gz can disappear out from under us if it's a
# duplicate of one in the archive.
if not self.pkg.files.has_key(checkfile):
    continue
2575 entry = self.pkg.files[checkfile]
2577 # Check that the source still exists
2578 if entry["type"] == "deb":
2579 source_version = entry["source version"]
2580 source_package = entry["source package"]
2581 if not self.pkg.changes["architecture"].has_key("source") \
2582 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2583 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2585 # Version and file overwrite checks
2586 if overwrite_checks:
2587 if entry["type"] == "deb":
2588 self.check_binary_against_db(checkfile, session)
2589 elif entry["type"] == "dsc":
2590 self.check_source_against_db(checkfile, session)
2591 self.check_dsc_against_db(dsc_filename, session)
# propagate in the case it is in the override tables:
2594 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2595 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
propogate[suite] = 1
else:
    nopropogate[suite] = 1
2600 for suite in propogate.keys():
if suite in nopropogate:
    continue
2603 self.pkg.changes["distribution"][suite] = 1
for checkfile in self.pkg.files.keys():
    entry = self.pkg.files[checkfile]
    # Check the package is still in the override tables
2607 for suite in self.pkg.changes["distribution"].keys():
2608 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2609 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2611 ################################################################################
2612 # This is not really a reject, but an unaccept, but since a) the code for
2613 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2614 # extremely rare, for now we'll go with whining at our admin folks...
def do_unaccept(self):
    cnf = Config()

    self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2621 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2622 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2623 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2624 if cnf.has_key("Dinstall::Bcc"):
2625 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2627 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2629 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2631 # Write the rejection email out as the <foo>.reason file
2632 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2633 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2635 # If we fail here someone is probably trying to exploit the race
2636 # so let's just raise an exception ...
2637 if os.path.exists(reject_filename):
2638 os.unlink(reject_filename)
2640 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
os.write(fd, reject_mail_message)
os.close(fd)
2644 utils.send_mail(reject_mail_message)
2646 del self.Subst["__REJECTOR_ADDRESS__"]
2647 del self.Subst["__REJECT_MESSAGE__"]
2648 del self.Subst["__CC__"]
2650 ################################################################################
2651 # If any file of an upload has a recent mtime then chances are good
2652 # the file is still being uploaded.
def upload_too_new(self):
    cnf = Config()
    too_new = False
    # Move back to the original directory to get accurate time stamps
    cwd = os.getcwd()
    os.chdir(self.pkg.directory)
2660 file_list = self.pkg.files.keys()
2661 file_list.extend(self.pkg.dsc_files.keys())
file_list.append(self.pkg.changes_file)
for f in file_list:
    last_modified = time.time() - os.path.getmtime(f)
    if last_modified < int(cnf["Dinstall::SkipTime"]):
        too_new = True
        break

os.chdir(cwd)
return too_new
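# Dinstall::SkipTime is the minimum age in seconds a file must have
# before the upload counts as complete; e.g. a value of 300 means
# "untouched for five minutes" (interpretation based on the comparison
# above).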
2675 def store_changelog(self):
2677 # Skip binary-only upload if it is not a bin-NMU
2678 if not self.pkg.changes['architecture'].has_key('source'):
2679 from daklib.regexes import re_bin_only_nmu
if not re_bin_only_nmu.search(self.pkg.changes['version']):
    return
2683 session = DBConn().session()
2685 # Check if upload already has a changelog entry
2686 query = """SELECT changelog_id FROM changes WHERE source = :source
2687 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2688 if session.execute(query, {'source': self.pkg.changes['source'], \
2689 'version': self.pkg.changes['version'], \
2690 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2694 # Add current changelog text into changelogs_text table, return created ID
2695 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2696 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2698 # Link ID to the upload available in changes table
2699 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2700 AND version = :version AND architecture = :architecture"""
2701 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2702 'version': self.pkg.changes['version'], \
2703 'architecture': " ".join(self.pkg.changes['architecture'].keys())})