5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
52 from summarystats import SummaryStats
53 from utils import parse_changes, check_dsc_files
54 from textutils import fix_maintainer
55 from binary import Binary
57 ###############################################################################
59 def get_type(f, session):
# Map a file entry from a parsed .changes to its database file type
# ("dsc"/"deb"/"udeb"), preferring an explicit "dbtype" key set earlier by
# binary_file_checks(); falls back to matching the "type" field against
# re_source_ext.  NOTE: this listing is elided — some original lines are
# missing between the numbered statements.
61 Get the file type of C{f}
64 @param f: file entry from Changes object
66 @type session: SQLA Session
67 @param session: SQL Alchemy session object
74 if f.has_key("dbtype"):
75 file_type = f["dbtype"]
76 elif re_source_ext.match(f["type"]):
# Unrecognised type: abort hard via utils.fubar() rather than continue.
79 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
81 # Validate the override type
# A missing override-type row in the DB is also fatal.
82 type_id = get_override_type(file_type, session)
84 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
88 ################################################################################
90 # Determine what parts in a .changes are NEW
92 def determine_new(changes, files, warn=1):
# Build a per-package dict of entries that have no override in any target
# suite (i.e. are NEW), then optionally warn about (old)stable targets and
# packages already present in another component.
# NOTE: elided listing — some original lines are missing.
94 Determine what parts in a C{changes} file are NEW.
96 @type changes: Upload.Pkg.changes dict
97 @param changes: Changes dictionary
99 @type files: Upload.Pkg.files dict
100 @param files: Files dictionary
103 @param warn: Warn if overrides are added for (old)stable
106 @return: dictionary of NEW components.
111 session = DBConn().session()
113 # Build up a list of potentially new things
114 for name, f in files.items():
115 # Skip byhand elements
116 if f["type"] == "byhand":
119 priority = f["priority"]
120 section = f["section"]
121 file_type = get_type(f, session)
122 component = f["component"]
124 if file_type == "dsc":
# First file seen for this package seeds the new[pkg] record.
127 if not new.has_key(pkg):
129 new[pkg]["priority"] = priority
130 new[pkg]["section"] = section
131 new[pkg]["type"] = file_type
132 new[pkg]["component"] = component
133 new[pkg]["files"] = []
135 old_type = new[pkg]["type"]
136 if old_type != file_type:
137 # source gets trumped by deb or udeb
138 if old_type == "dsc":
139 new[pkg]["priority"] = priority
140 new[pkg]["section"] = section
141 new[pkg]["type"] = file_type
142 new[pkg]["component"] = component
144 new[pkg]["files"].append(name)
146 if f.has_key("othercomponents"):
147 new[pkg]["othercomponents"] = f["othercomponents"]
# Anything that does have an override in a target suite is not NEW:
# clear the "new" markers on its files.
149 for suite in changes["suite"].keys():
150 for pkg in new.keys():
151 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
153 for file_entry in new[pkg]["files"]:
154 if files[file_entry].has_key("new"):
155 del files[file_entry]["new"]
# Warnings are emitted on stdout, not collected — presumably intentional
# for the interactive 'dak process-new' flow; TODO confirm.
159 for s in ['stable', 'oldstable']:
160 if changes["suite"].has_key(s):
161 print "WARNING: overrides will be added for %s!" % s
162 for pkg in new.keys():
163 if new[pkg].has_key("othercomponents"):
164 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
170 ################################################################################
172 def check_valid(new):
# Resolve section/priority names to DB ids in-place on each new[pkg]
# entry, writing -1 for anything unknown or inconsistent.
# NOTE: elided listing — some original lines are missing.
174 Check if section and priority for NEW packages exist in database.
175 Additionally does sanity checks:
176 - debian-installer packages have to be udeb (or source)
177 - non debian-installer packages can not be udeb
178 - source priority can only be assigned to dsc file types
181 @param new: Dict of new packages with their section, priority and type.
184 for pkg in new.keys():
185 section_name = new[pkg]["section"]
186 priority_name = new[pkg]["priority"]
187 file_type = new[pkg]["type"]
189 section = get_section(section_name)
191 new[pkg]["section id"] = -1
193 new[pkg]["section id"] = section.section_id
195 priority = get_priority(priority_name)
197 new[pkg]["priority id"] = -1
199 new[pkg]["priority id"] = priority.priority_id
# Substring match: any section containing "debian-installer" counts as d-i.
202 di = section_name.find("debian-installer") != -1
204 # If d-i, we must be udeb and vice-versa
205 if (di and file_type not in ("udeb", "dsc")) or \
206 (not di and file_type == "udeb"):
207 new[pkg]["section id"] = -1
209 # If dsc we need to be source and vice-versa
# NOTE(review): `priority` is the object returned by get_priority(), so
# comparing it to the string "source" looks wrong — `priority_name` was
# probably intended.  TODO confirm against the rest of the project.
210 if (priority == "source" and file_type != "dsc") or \
211 (priority != "source" and file_type == "dsc"):
212 new[pkg]["priority id"] = -1
214 ###############################################################################
216 def lookup_uid_from_fingerprint(fpr, session):
# Resolve a GPG fingerprint to (uid, uid_name, is_dm); is_dm comes from
# the keyring's debian_maintainer flag on the matching fingerprint row.
# NOTE: elided listing — some original lines are missing (including the
# assignments of uid/uid_name on the not-found paths).
219 # This is a stupid default, but see the comments below
222 user = get_uid_from_fingerprint(fpr, session)
226 if user.name is None:
231 # Check the relevant fingerprint (which we have to have)
232 for f in user.fingerprint:
233 if f.fingerprint == fpr:
234 is_dm = f.keyring.debian_maintainer
237 return (uid, uid_name, is_dm)
239 ###############################################################################
241 def check_status(files):
# Scan the files dict for byhand entries and files still flagged "new";
# the elided lines presumably accumulate/return counts — TODO confirm.
243 for f in files.keys():
244 if files[f]["type"] == "byhand":
246 elif files[f].has_key("new"):
250 ###############################################################################
252 # Used by Upload.check_timestamps
253 class TarTime(object):
# Tar-extraction callback collector: records member names whose mtime
# falls outside the [past_cutoff, future_cutoff] window.
254 def __init__(self, future_cutoff, past_cutoff):
# future_cutoff: newest acceptable mtime; past_cutoff: oldest acceptable.
256 self.future_cutoff = future_cutoff
257 self.past_cutoff = past_cutoff
# name -> offending mtime
260 self.future_files = {}
261 self.ancient_files = {}
# Signature matches the python-apt tar extract callback; only Name and
# MTime are used here, the rest are ignored.
263 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
264 if MTime > self.future_cutoff:
265 self.future_files[Name] = MTime
266 if MTime < self.past_cutoff:
267 self.ancient_files[Name] = MTime
269 ###############################################################################
271 class Upload(object):
273 Everything that has to do with an upload processed.
281 ###########################################################################
284 """ Reset a number of internal variables."""
# NOTE(review): the enclosing `def` line is elided from this listing;
# these statements seed self.Subst with the static Dinstall-wide
# substitution values (per-package values are added in update_subst()).
286 # Initialize the substitution template map
289 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
290 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
291 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
292 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
300 def package_info(self):
# Render the accumulated rejects/warnings/notes lists into one message
# string (used as __REJECT_MESSAGE__ in update_subst()).
# NOTE: elided listing — the initialisation of msg and the section
# headers for warnings/notes are among the missing lines.
303 if len(self.rejects) > 0:
304 msg += "Reject Reasons:\n"
305 msg += "\n".join(self.rejects)
307 if len(self.warnings) > 0:
309 msg += "\n".join(self.warnings)
311 if len(self.notes) > 0:
313 msg += "\n".join(self.notes)
317 ###########################################################################
318 def update_subst(self):
319 """ Set up the per-package template substitution mappings """
323 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
324 if not self.pkg.changes.has_key("architecture") or not \
325 isinstance(self.pkg.changes["architecture"], DictType):
326 self.pkg.changes["architecture"] = { "Unknown" : "" }
328 # and maintainer2047 may not exist.
329 if not self.pkg.changes.has_key("maintainer2047"):
330 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
332 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
333 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
334 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
336 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
337 if self.pkg.changes["architecture"].has_key("source") and \
338 self.pkg.changes["changedby822"] != "" and \
339 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
341 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
# Mail goes to both the sponsoree (Changed-By) and the maintainer.
342 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
343 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
345 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
346 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
347 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
349 if "sponsoremail" in self.pkg.changes:
350 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
# Bcc the PTS-style tracking server when configured.
352 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
353 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
355 # Apply any global override of the Maintainer field
356 if cnf.get("Dinstall::OverrideMaintainer"):
357 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
358 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
360 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
361 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
362 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
364 ###########################################################################
365 def load_changes(self, filename):
# Parse a .changes file into self.pkg.changes / self.pkg.files, validate
# mandatory fields, normalise multi-value fields and maintainer fields,
# and append any problems to self.rejects.
# NOTE: elided listing — several original lines (try statements, loop
# bodies, returns) are missing between the numbered statements.
368 @rvalue: whether the changes file was valid or not. We may want to
369 reject even if this is True (see what gets put in self.rejects).
370 This is simply to prevent us even trying things later which will
371 fail because we couldn't properly parse the file.
374 self.pkg.changes_file = filename
376 # Parse the .changes field into a dictionary
378 self.pkg.changes.update(parse_changes(filename))
379 except CantOpenError:
380 self.rejects.append("%s: can't read file." % (filename))
382 except ParseChangesError, line:
383 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
385 except ChangesUnicodeError:
386 self.rejects.append("%s: changes file not proper utf-8" % (filename))
389 # Parse the Files field from the .changes into another dictionary
391 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
392 except ParseChangesError, line:
393 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
395 except UnknownFormatError, format:
396 self.rejects.append("%s: unknown format '%s'." % (filename, format))
399 # Check for mandatory fields
400 for i in ("distribution", "source", "binary", "architecture",
401 "version", "maintainer", "files", "changes", "description"):
402 if not self.pkg.changes.has_key(i):
403 # Avoid undefined errors later
404 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
407 # Strip a source version in brackets from the source field
408 if re_strip_srcver.search(self.pkg.changes["source"]):
409 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
411 # Ensure the source field is a valid package name.
412 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
413 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
415 # Split multi-value fields into a lower-level dictionary
416 for i in ("architecture", "distribution", "binary", "closes"):
417 o = self.pkg.changes.get(i, "")
419 del self.pkg.changes[i]
421 self.pkg.changes[i] = {}
424 self.pkg.changes[i][j] = 1
426 # Fix the Maintainer: field to be RFC822/2047 compatible
428 (self.pkg.changes["maintainer822"],
429 self.pkg.changes["maintainer2047"],
430 self.pkg.changes["maintainername"],
431 self.pkg.changes["maintaineremail"]) = \
432 fix_maintainer (self.pkg.changes["maintainer"])
433 except ParseMaintError, msg:
# NOTE(review): bare `changes` is not defined in this method — this
# except path would raise NameError; should be self.pkg.changes.
434 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
435 % (filename, changes["maintainer"], msg))
437 # ...likewise for the Changed-By: field if it exists.
439 (self.pkg.changes["changedby822"],
440 self.pkg.changes["changedby2047"],
441 self.pkg.changes["changedbyname"],
442 self.pkg.changes["changedbyemail"]) = \
443 fix_maintainer (self.pkg.changes.get("changed-by", ""))
444 except ParseMaintError, msg:
# On failure, blank all changedby fields so later code can rely on them.
445 self.pkg.changes["changedby822"] = ""
446 self.pkg.changes["changedby2047"] = ""
447 self.pkg.changes["changedbyname"] = ""
448 self.pkg.changes["changedbyemail"] = ""
# NOTE(review): same undefined `changes` name as above.
450 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
451 % (filename, changes["changed-by"], msg))
453 # Ensure all the values in Closes: are numbers
454 if self.pkg.changes.has_key("closes"):
455 for i in self.pkg.changes["closes"].keys():
456 if re_isanum.match (i) == None:
457 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
459 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
460 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
461 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
463 # Check there isn't already a changes file of the same name in one
464 # of the queue directories.
465 base_filename = os.path.basename(filename)
# NOTE(review): uses `Cnf` here but `cnf` elsewhere in this class —
# inconsistent; verify which is bound in this scope.
466 for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
467 if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
468 self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
470 # Check the .changes is non-empty
471 if not self.pkg.files:
472 self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
475 # Changes was syntactically valid even if we'll reject
478 ###########################################################################
480 def check_distributions(self):
481 "Check and map the Distribution field"
# Apply the configured SuiteMappings (map/silent-map/map-unreleased/
# ignore/reject/propup-version) to self.pkg.changes["distribution"],
# then ensure at least one known target suite remains.
# NOTE: elided listing — the parsing of `m` into mtype/args is missing.
485 # Handle suite mappings
486 for m in Cnf.ValueList("SuiteMappings"):
489 if mtype == "map" or mtype == "silent-map":
490 (source, dest) = args[1:3]
491 if self.pkg.changes["distribution"].has_key(source):
492 del self.pkg.changes["distribution"][source]
493 self.pkg.changes["distribution"][dest] = 1
494 if mtype != "silent-map":
495 self.notes.append("Mapping %s to %s." % (source, dest))
496 if self.pkg.changes.has_key("distribution-version"):
497 if self.pkg.changes["distribution-version"].has_key(source):
498 self.pkg.changes["distribution-version"][source]=dest
499 elif mtype == "map-unreleased":
500 (source, dest) = args[1:3]
501 if self.pkg.changes["distribution"].has_key(source):
# Only remap when some upload architecture is not built in `source`.
# NOTE(review): get_suite_architectures() is called without a session
# here, unlike the calls in binary_file_checks — confirm the default.
502 for arch in self.pkg.changes["architecture"].keys():
503 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
504 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
505 del self.pkg.changes["distribution"][source]
506 self.pkg.changes["distribution"][dest] = 1
508 elif mtype == "ignore":
510 if self.pkg.changes["distribution"].has_key(suite):
511 del self.pkg.changes["distribution"][suite]
512 self.warnings.append("Ignoring %s as a target suite." % (suite))
513 elif mtype == "reject":
515 if self.pkg.changes["distribution"].has_key(suite):
516 self.rejects.append("Uploads to %s are not accepted." % (suite))
517 elif mtype == "propup-version":
518 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
520 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
521 if self.pkg.changes["distribution"].has_key(args[1]):
522 self.pkg.changes.setdefault("distribution-version", {})
523 for suite in args[2:]:
524 self.pkg.changes["distribution-version"][suite] = suite
526 # Ensure there is (still) a target distribution
527 if len(self.pkg.changes["distribution"].keys()) < 1:
528 self.rejects.append("No valid distribution remaining.")
530 # Ensure target distributions exist
531 for suite in self.pkg.changes["distribution"].keys():
532 if not Cnf.has_key("Suite::%s" % (suite)):
533 self.rejects.append("Unknown distribution `%s'." % (suite))
535 ###########################################################################
537 def binary_file_checks(self, f, session):
# Validate a .deb/.udeb: extract and sanity-check its control file,
# cross-check it against the .changes, verify the filename encodes the
# same package/version/architecture, and confirm its source exists.
# Appends to self.rejects/self.warnings; mutates self.pkg.files[f].
# NOTE: elided listing — try/except/else scaffolding and some branches
# are missing between the numbered statements.
539 entry = self.pkg.files[f]
541 # Extract package control information
542 deb_file = utils.open_file(f)
544 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
# NOTE(review): sys.exc_type is long-deprecated; the modern form is
# `except Exception, e` / sys.exc_info().
546 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
548 # Can't continue, none of the checks on control would work.
551 # Check for mandatory "Description:"
# Re-extracts the control file just to probe the Description key;
# KeyError presumably lands in the elided except clause.
554 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
556 self.rejects.append("%s: Missing Description in binary package" % (f))
561 # Check for mandatory fields
562 for field in [ "Package", "Architecture", "Version" ]:
563 if control.Find(field) == None:
565 self.rejects.append("%s: No %s field in control." % (f, field))
568 # Ensure the package name matches the one given in the .changes
569 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
570 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
572 # Validate the package field
573 package = control.Find("Package")
574 if not re_valid_pkg_name.match(package):
575 self.rejects.append("%s: invalid package name '%s'." % (f, package))
577 # Validate the version field
578 version = control.Find("Version")
579 if not re_valid_version.match(version):
580 self.rejects.append("%s: invalid version number '%s'." % (f, version))
582 # Ensure the architecture of the .deb is one we know about.
583 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
584 architecture = control.Find("Architecture")
# Only the first target suite is consulted alongside the default suite.
585 upload_suite = self.pkg.changes["distribution"].keys()[0]
587 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
588 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
589 self.rejects.append("Unknown architecture '%s'." % (architecture))
591 # Ensure the architecture of the .deb is one of the ones
592 # listed in the .changes.
593 if not self.pkg.changes["architecture"].has_key(architecture):
594 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
596 # Sanity-check the Depends field
597 depends = control.Find("Depends")
599 self.rejects.append("%s: Depends field is empty." % (f))
601 # Sanity-check the Provides field
602 provides = control.Find("Provides")
604 provide = re_spacestrip.sub('', provides)
606 self.rejects.append("%s: Provides field is empty." % (f))
607 prov_list = provide.split(",")
608 for prov in prov_list:
609 if not re_valid_pkg_name.match(prov):
610 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
612 # Check the section & priority match those given in the .changes (non-fatal)
613 if control.Find("Section") and entry["section"] != "" \
614 and entry["section"] != control.Find("Section"):
615 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
616 (f, control.Find("Section", ""), entry["section"]))
617 if control.Find("Priority") and entry["priority"] != "" \
618 and entry["priority"] != control.Find("Priority"):
619 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
620 (f, control.Find("Priority", ""), entry["priority"]))
622 entry["package"] = package
623 entry["architecture"] = architecture
624 entry["version"] = version
625 entry["maintainer"] = control.Find("Maintainer", "")
# dbtype is derived from the file extension, not the control file.
627 if f.endswith(".udeb"):
628 self.pkg.files[f]["dbtype"] = "udeb"
629 elif f.endswith(".deb"):
630 self.pkg.files[f]["dbtype"] = "deb"
632 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
# Source defaults to the binary package name when no Source: field.
634 entry["source"] = control.Find("Source", entry["package"])
636 # Get the source version
637 source = entry["source"]
# A "pkg (version)" Source: field carries an explicit source version.
640 if source.find("(") != -1:
641 m = re_extract_src_version.match(source)
643 source_version = m.group(2)
645 if not source_version:
646 source_version = self.pkg.files[f]["version"]
648 entry["source package"] = source
649 entry["source version"] = source_version
651 # Ensure the filename matches the contents of the .deb
652 m = re_isadeb.match(f)
655 file_package = m.group(1)
656 if entry["package"] != file_package:
657 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
658 (f, file_package, entry["dbtype"], entry["package"]))
659 epochless_version = re_no_epoch.sub('', control.Find("Version"))
662 file_version = m.group(2)
663 if epochless_version != file_version:
664 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
665 (f, file_version, entry["dbtype"], epochless_version))
668 file_architecture = m.group(3)
669 if entry["architecture"] != file_architecture:
670 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
671 (f, file_architecture, entry["dbtype"], entry["architecture"]))
673 # Check for existent source
674 source_version = entry["source version"]
675 source_package = entry["source package"]
676 if self.pkg.changes["architecture"].has_key("source"):
677 if source_version != self.pkg.changes["version"]:
678 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
679 (source_version, f, self.pkg.changes["version"]))
681 # Check in the SQL database
682 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
683 # Check in one of the other directories
684 source_epochless_version = re_no_epoch.sub('', source_version)
685 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
686 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
688 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
691 dsc_file_exists = False
692 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
693 if cnf.has_key("Dir::Queue::%s" % (myq)):
694 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
695 dsc_file_exists = True
698 if not dsc_file_exists:
699 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
701 # Check the version and for file overwrites
702 self.check_binary_against_db(f, session)
704 # Temporarily disable contents generation until we change the table storage layout
707 #if len(b.rejects) > 0:
708 # for j in b.rejects:
709 # self.rejects.append(j)
711 def source_file_checks(self, f, session):
# Validate a source file (.dsc / tarball / diff): parse name, version
# and type out of the filename and cross-check against the .changes.
# Appends problems to self.rejects; mutates self.pkg.files[f].
# NOTE: elided listing — some original lines are missing.
712 entry = self.pkg.files[f]
714 m = re_issource.match(f)
718 entry["package"] = m.group(1)
719 entry["version"] = m.group(2)
720 entry["type"] = m.group(3)
722 # Ensure the source package name matches the Source field in the .changes
723 if self.pkg.changes["source"] != entry["package"]:
724 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
726 # Ensure the source version matches the version in the .changes file
# orig tarballs carry the upstream version (no Debian revision), so
# compare against chopversion2 instead of chopversion.
727 if re_is_orig_source.match(f):
728 changes_version = self.pkg.changes["chopversion2"]
730 changes_version = self.pkg.changes["chopversion"]
732 if changes_version != entry["version"]:
733 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
735 # Ensure the .changes lists source in the Architecture field
736 if not self.pkg.changes["architecture"].has_key("source"):
737 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
739 # Check the signature of a .dsc file
740 if entry["type"] == "dsc":
741 # check_signature returns either:
742 # (None, [list, of, rejects]) or (signature, [])
743 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
745 self.rejects.append(j)
# All source files count as architecture "source".
747 entry["architecture"] = "source"
749 def per_suite_file_checks(self, f, suite, session):
# Per-target-suite validation of one file: component mapping and
# validity, NEW detection, priority sanity, pool location and existing
# pool-file consistency (md5/size), and cross-component detection.
# NOTE: elided listing — some original lines are missing.
751 entry = self.pkg.files[f]
752 archive = utils.where_am_i()
# byhand files skip all of these checks.
755 if entry.has_key("byhand"):
758 # Check we have fields we need to do these checks
760 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
761 if not entry.has_key(m):
762 self.rejects.append("file '%s' does not have field %s set" % (f, m))
768 # Handle component mappings
769 for m in cnf.ValueList("ComponentMappings"):
770 (source, dest) = m.split()
771 if entry["component"] == source:
772 entry["original component"] = source
773 entry["component"] = dest
775 # Ensure the component is valid for the target suite
# NOTE(review): "Suite:%s::Components" is missing a colon — it will
# never match the key "Suite::%s::Components" used on the next line,
# silently disabling this check.  TODO confirm and fix.
776 if cnf.has_key("Suite:%s::Components" % (suite)) and \
777 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
778 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
781 # Validate the component
# NOTE(review): bare `component` is undefined here — the message would
# raise NameError; should be entry["component"].
782 if not get_component(entry["component"], session):
783 self.rejects.append("file '%s' has unknown component '%s'." % (f, component))
786 # See if the package is NEW
787 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
790 # Validate the priority
791 if entry["priority"].find('/') != -1:
792 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
794 # Determine the location
795 location = cnf["Dir::Pool"]
796 l = get_location(location, entry["component"], archive, session)
# NOTE(review): same undefined `component` name as above.
798 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
799 entry["location id"] = -1
801 entry["location id"] = l.location_id
803 # Check the md5sum & size against existing files (if any)
804 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
806 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
807 entry["size"], entry["md5sum"], entry["location id"])
810 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
811 elif found is False and poolfile is not None:
812 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
815 entry["files id"] = None
817 entry["files id"] = poolfile.file_id
819 # Check for packages that have moved from one component to another
820 entry['suite'] = suite
821 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
823 entry["othercomponents"] = res.fetchone()[0]
825 def check_files(self, action=True):
# Top-level per-file validation driver: copy files to the holding area
# (when action is set), guard against name collisions in queue/copy
# directories, classify each file as byhand/deb/source, dispatch to the
# specific checkers, then run per-suite checks and the source-presence
# invariants.  NOTE: elided listing — loops/else branches are missing
# between the numbered statements.
826 archive = utils.where_am_i()
827 file_keys = self.pkg.files.keys()
831 # XXX: As far as I can tell, this can no longer happen - see
832 # comments by AJ in old revisions - mhy
833 # if reprocess is 2 we've already done this and we're checking
834 # things again for the new .orig.tar.gz.
835 # [Yes, I'm fully aware of how disgusting this is]
836 if action and self.reprocess < 2:
838 os.chdir(self.pkg.directory)
# copy_to_holding() returns an error string on failure, else falsy.
840 ret = holding.copy_to_holding(f)
842 # XXX: Should we bail out here or try and continue?
843 self.rejects.append(ret)
847 # Check there isn't already a .changes or .dak file of the same name in
848 # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
849 # [NB: this check must be done post-suite mapping]
850 base_filename = os.path.basename(self.pkg.changes_file)
851 dot_dak_filename = base_filename[:-8] + ".dak"
853 for suite in self.pkg.changes["distribution"].keys():
854 copychanges = "Suite::%s::CopyChanges" % (suite)
855 if cnf.has_key(copychanges) and \
856 os.path.exists(os.path.join(cnf[copychanges], base_filename)):
857 self.rejects.append("%s: a file with this name already exists in %s" \
858 % (base_filename, cnf[copychanges]))
860 copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
861 if cnf.has_key(copy_dot_dak) and \
862 os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
# NOTE(review): `Cnf` here vs `cnf` two lines up — inconsistent; verify
# which name is bound in this scope.
863 self.rejects.append("%s: a file with this name already exists in %s" \
864 % (dot_dak_filename, Cnf[copy_dot_dak]))
870 session = DBConn().session()
872 for f, entry in self.pkg.files.items():
873 # Ensure the file does not already exist in one of the accepted directories
874 for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
875 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
876 if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
877 self.rejects.append("%s file already exists in the %s directory." % (f, d))
879 if not re_taint_free.match(f):
880 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
882 # Check the file is readable
883 if os.access(f, os.R_OK) == 0:
884 # When running in -n, copy_to_holding() won't have
885 # generated the reject_message, so we need to.
887 if os.path.exists(f):
888 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
890 self.rejects.append("Can't read `%s'. [file not found]" % (f))
891 entry["type"] = "unreadable"
894 # If it's byhand skip remaining checks
895 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
897 entry["type"] = "byhand"
899 # Checks for a binary package...
900 elif re_isadeb.match(f):
902 entry["type"] = "deb"
904 # This routine appends to self.rejects/warnings as appropriate
905 self.binary_file_checks(f, session)
907 # Checks for a source package...
908 elif re_issource.match(f):
911 # This routine appends to self.rejects/warnings as appropriate
912 self.source_file_checks(f, session)
914 # Not a binary or source package? Assume byhand...
917 entry["type"] = "byhand"
919 # Per-suite file checks
920 entry["oldfiles"] = {}
921 for suite in self.pkg.changes["distribution"].keys():
922 self.per_suite_file_checks(f, suite, session)
926 # If the .changes file says it has source, it must have source.
927 if self.pkg.changes["architecture"].has_key("source"):
929 self.rejects.append("no source found and Architecture line in changes mention source.")
931 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
932 self.rejects.append("source only uploads are not supported.")
934 ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
        # Walk the upload's file list looking for the .dsc; more than one is an error.
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
            self.rejects.append("source uploads must contain a dsc file")
        # Parse the .dsc file
            # signing_rules=1: the .dsc must carry a signature layout — TODO confirm against utils.parse_changes
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
                # Have apt try to parse them...
                    apt_pkg.ParseSrcDepends(field)
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
1037 ###########################################################################
    def get_changelog_versions(self, source_dir):
        """Extract the source package and (optionally) grab the
        version history out of debian/changelog for the BTS."""
        # Find the .dsc (again)
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                # NOTE(review): bare 'pkg' below looks like it should be 'self.pkg' — confirm
                if re_is_orig_source.match(f) and pkg.orig_files.has_key(f) and \
                   pkg.orig_files[f].has_key("path"):
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # already-recorded copies so dpkg-source can find them
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))

        # NOTE(review): 'cnf' is presumably Config(), initialised above — confirm
        if not cnf.Find("Dir::Queue::BTSVersionTrack"):

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        """Extract the source tree in a temp dir, harvest the changelog history, then clean up."""
        # XXX: I'm fairly sure reprocess == 2 can never happen
        # AJT disabled the is_incoming check years ago - mhy
        # We should probably scrap or rethink the whole reprocess thing
        # Bail out if:
        # a) there's no source
        # or b) reprocess is 2 - we will do this check next time when orig
        # tarball is in 'files'
        # or c) the orig files are MIA
        if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
           or len(self.pkg.orig_files) == 0:

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        # NOTE(review): 'cwd' is presumably os.getcwd() captured before chdir(tmpdir) — confirm
        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
            shutil.rmtree(tmpdir)
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and retry the removal
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1157 ###########################################################################
    def ensure_hashes(self):
        """Copy checksum fields from the original .changes blob and verify/compute
        all known hashes for both .changes and .dsc, appending problems to self.rejects."""
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
            # (fallback branch: single-component format string) — the 'else:' line is elided here
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict. This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
1194 def check_hashes(self):
1195 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1196 self.rejects.append(m)
1198 for m in utils.check_size(".changes", self.pkg.files):
1199 self.rejects.append(m)
1201 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1202 self.rejects.append(m)
1204 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1205 self.rejects.append(m)
1207 self.ensure_hashes()
1209 ###########################################################################
    def check_lintian(self):
        """Run lintian with the configured tag list against the upload and
        reject on fatal tags, honouring (some) overrides."""
        # Only check some distributions
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:

        # NOTE(review): 'cnf' is presumably Config(), initialised above — confirm
        tagfile = cnf.get("Dinstall::LintianTags")
        # We don't have a tagfile, so just don't do anything.
        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
            # NOTE(review): yaml.load() on the local, operator-controlled tag file;
            # yaml.safe_load would still be the safer default — confirm
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))

        # Now setup the input file for lintian. lintian wants "one tag per line" only,
        # so put it together like it. We put all types of tags in one file and then sort
        # through lintians output later to see if its a fatal tag we detected, or not.
        # So we only run lintian once on all tags, even if we might reject on some, but not
        # Additionally build up a set of tags
        (fd, temp_filename) = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tagtype in lintiantags:
            for tag in lintiantags[tagtype]:
                temptagfile.write("%s\n" % tag)

        # So now we should look at running lintian at the .changes file, capturing output
        command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
        (result, output) = commands.getstatusoutput(command)
        # We are done with lintian, remove our tempfile
        os.unlink(temp_filename)
            utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))

        if len(output) == 0:

        # We have output of lintian, this package isn't clean. Lets parse it and see if we
        # are having a victim for a reject.
        # W: tzdata: binary-without-manpage usr/sbin/tzconfig
        for line in output.split('\n'):
            m = re_parse_lintian.match(line)
                epackage = m.group(2)

                # NOTE(review): 'etag', 'etext' and 'tags' presumably come from the
                # elided regex groups / tag set built above — confirm
                # So lets check if we know the tag at all.
                if etag not in tags:

                # We know it and it is overriden. Check that override is allowed.
                if etag in lintiantags['warning']:
                    # The tag is overriden, and it is allowed to be overriden.
                    # Don't add a reject message.
                elif etag in lintiantags['error']:
                    # The tag is overriden - but is not allowed to be
                    self.rejects.append("%s: Overriden tag %s found, but this tag may not be overwritten." % (epackage, etag))
                    # Tag is known, it is not overriden, direct reject.
                    self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
                    # Now tell if they *might* override it.
                    if etag in lintiantags['warning']:
                        self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
1297 ###########################################################################
    def check_urgency(self):
        """Normalise the Urgency field of a source upload, replacing a missing
        or invalid value with Urgency::Default (warning on the latter)."""
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                # NOTE(review): 'cnf' is presumably Config(), initialised above — confirm
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1309 ###########################################################################
1311 # Sanity check the time stamps of files inside debs.
1312 # [Files in the near future cause ugly warnings and extreme time
1313 # travel can cause errors on extraction]
    def check_timestamps(self):
        """Sanity-check timestamps of files inside each uploaded .deb, rejecting
        members dated too far in the future or before the configured cutoff year."""
        # NOTE(review): uses global 'Cnf' here where other methods use a local 'cnf' — confirm it's in scope
        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                except SystemError, e:
                    # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                    if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                    apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")

                future_files = tar.future_files.keys()
                    num_future_files = len(future_files)
                    future_file = future_files[0]
                    future_date = tar.future_files[future_file]
                    self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                        % (filename, num_future_files, future_file, time.ctime(future_date)))

                ancient_files = tar.ancient_files.keys()
                    num_ancient_files = len(ancient_files)
                    ancient_file = ancient_files[0]
                    ancient_date = tar.ancient_files[ancient_file]
                    self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                        % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1358 ###########################################################################
    def check_transition(self, session):
        """Reject sourceful unstable uploads of packages that are frozen by an
        ongoing testing transition (as listed in the release transitions file)."""
        # NOTE(review): 'cnf' is presumably Config(), initialised above — confirm
        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:

        # Also only check if there is a file defined (and existant) with
        # the transition information
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe than ending up rejecting
            # everything
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            # NOTE(review): 'expected' is presumably read from the transition entry (t["new"]) — confirm

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s). This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance. You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected,t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
1430 ###########################################################################
    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        self.check_transition(session)

        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)

        # match claimed name with actual name:
        # NOTE(review): the condition for this first arm is elided — presumably 'if uid is None:' — confirm
            # This is fundamentally broken but need us to refactor how we get
            # the UIDs/Fingerprints in order for us to fix it properly
            uid, uid_email = self.pkg.changes["fingerprint"], uid
            may_nmu, may_sponsor = 1, 1
            # XXX by default new dds don't have a fingerprint/uid in the db atm,
            # and can't get one in there if we don't allow nmu/sponsorship
        elif is_dm is False:
            # If is_dm is False, we allow full upload rights
            uid_email = "%s@debian.org" % (uid)
            may_nmu, may_sponsor = 1, 1
            # Assume limited upload rights unless we've discovered otherwise
            may_nmu, may_sponsor = 0, 0

        # Work out whether this looks like a sponsored upload
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            if uid_name == "": sponsored = 1
            # Record the sponsor address if the signer's alias doesn't match maintainer/changed-by
            if ("source" in self.pkg.changes["architecture"] and
                uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        if sponsored and not may_sponsor:
            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))

        # DM (restricted) uploads: verify DM-Upload-Allowed and Uploaders membership
        if not sponsored and not may_nmu:
            should_reject = True
            highest_sid, highest_version = None, None

            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
            # It ignores higher versions with the dm_upload_allowed flag set to false
            # I'm keeping the existing behaviour for now until I've gone back and
            # checked exactly what the GR says - mhy
            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
                    highest_sid = si.source_id
                    highest_version = si.version

            if highest_sid is None:
                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
                    if email == uid_email or name == uid_name:
                        should_reject = False

            if should_reject is True:
                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))

            # A DM may not hijack binaries currently built from another source package
            for b in self.pkg.changes["binary"].keys():
                for suite in self.pkg.changes["distribution"].keys():
                    q = session.query(DBSource)
                    q = q.join(DBBinary).filter_by(package=b)
                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
                        if s.source != self.pkg.changes["source"]:
                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))

            # DMs may not upload BYHAND or NEW files
            for f in self.pkg.files.keys():
                if self.pkg.files[f].has_key("byhand"):
                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
                if self.pkg.files[f].has_key("new"):
                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))
1516 ###########################################################################
    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """
        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        # NOTE(review): 'byhand'/'new' are unused in the visible lines — presumably
        # consumed by elided conditionals guarding the additions below; confirm
        summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
1536 ###########################################################################
    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.
        """
        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        summary += "Closing bugs: "
            summary += "%s " % (bug)
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)
1591 ###########################################################################
    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false no real action will be done.

        @rtype: string
        @return: Textstring about action taken.
        """
        # NOTE(review): 'cnf', 'summary' and 'lists_done' are initialised in elided lines — confirm
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
            # Skip suites with no announce list, and lists we already mailed
            if announce_list == "" or lists_done.has_key(announce_list):
            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]
1649 ###########################################################################
    def accept (self, summary, short_summary, targetdir=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        the autobuild queue handler.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """
        # NOTE(review): 'cnf' is presumably Config(), initialised above — confirm
        stats = SummaryStats()

        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')

        if targetdir is None:
            targetdir = cnf["Dir::Queue::Accepted"]

        self.logger.log(["Accepting changes", self.pkg.changes_file])

        self.pkg.write_dot_dak(targetdir)

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, targetdir)

        for name, entry in sorted(self.pkg.files.items()):
            utils.move(name, targetdir)
            stats.accept_bytes += float(entry["size"])

        stats.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # removed.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        # Its is Cnf["Dir::Queue::Accepted"] here, not targetdir!
        # <Ganneff> we do call queue_build too
        # <mhy> well yes, we'd have had to if we were inserting into accepted
        # <Ganneff> now. thats database only.
        # <mhy> urgh, that's going to get messy
        # <Ganneff> so i make the p-n call to it *also* using accepted/
        # <mhy> but then the packages will be in the queue_build table without the files being there
        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
        # <mhy> ah, good point
        # <Ganneff> so it will work out, as unchecked move it over
        # <mhy> that's all completely sick

        # This routine returns None on success or an error on failure
        res = get_or_set_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
    def check_override(self):
        """
        Checks override entries for validity. Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
          - override disparity checks are disabled
          - mail sending is disabled
        """
        # NOTE(review): 'cnf' is presumably Config(), initialised above — confirm
        # Abandon the check if:
        # a) override disparity checks have been disabled
        # b) we're not sending mail
        if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           cnf["Dinstall::Options::No-Mail"]:

        summary = self.pkg.check_override()

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]
1787 ###########################################################################
    def remove(self, dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked
        """
        # NOTE(review): 'dir' is unused in the visible lines — presumably an elided
        # guard chooses between it and self.pkg.directory; confirm
        os.chdir(self.pkg.directory)
        # Unlink every file the upload references, then the .changes itself
        for f in self.pkg.files.keys():
        os.unlink(self.pkg.changes_file)
1802 ###########################################################################
1804 def move_to_dir (self, dest, perms=0660, changesperms=0664):
1806 Move files to dest with certain perms/changesperms
1808 utils.move(self.pkg.changes_file, dest, perms=changesperms)
1809 for f in self.pkg.files.keys():
1810 utils.move(f, dest, perms=perms)
1812 ###########################################################################
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @param reject_files: names of the files to move to Dir::Queue::Reject
        """
        # NOTE(review): 'cnf' is presumably Config(), initialised above — confirm
        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

                # O_EXCL: atomically claim the destination so a racing process can't clobber us
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
                # File exists? Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                    utils.move(dest_file, morgue_file, perms=0660)
                        # Retry the exclusive claim now the old file is out of the way
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
1862 ###########################################################################
    def do_reject (self, manual=0, reject_message="", note=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type note: string
        @param note: text pre-filled into the editor buffer — TODO confirm
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
                # NOTE(review): 'line' presumably iterates over 'note' in an elided loop — confirm
                temp_file.write(line)
            editor = os.environ.get("EDITOR","vi")
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)

        print "Rejecting.\n"

        # NOTE(review): 'cnf' is presumably Config(), initialised above — confirm
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

            # Automated rejection: canned headers, message written verbatim
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)

            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        # Send the rejection mail if appropriate
        if not cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

            self.logger.log(["rejected", self.pkg.changes_file])
1960 ################################################################################
1961 def in_override_p(self, package, component, suite, binary_type, file, session):
1963 Check if a package already has override entries in the DB
1965 @type package: string
1966 @param package: package name
1968 @type component: string
1969 @param component: database id of the component
1972 @param suite: database id of the suite
1974 @type binary_type: string
1975 @param binary_type: type of the package
1978 @param file: filename we check
1980 @return: the database result. But no one cares anyway.
# An empty binary_type means the entry is the source package itself
# (presumably mapped to the "dsc" override type in the elided branch —
# TODO confirm, the assignment line is not visible here).
1986 if binary_type == "": # must be source
1989 file_type = binary_type
1991 # Override suite name; used for example with proposed-updates
1992 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
1993 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
1995 result = get_override(package, suite, component, file_type, session)
1997 # If checking for a source package fall back on the binary override type
1998 if file_type == "dsc" and len(result) < 1:
1999 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2001 # Remember the section and priority so we can check them later if appropriate
# Side effect: annotates self.pkg.files[file] with the override's
# section/priority for later comparison against the upload's values.
2004 self.pkg.files[file]["override section"] = result.section.section
2005 self.pkg.files[file]["override priority"] = result.priority.priority
2010 ################################################################################
2011 def get_anyversion(self, sv_list, suite):
# Find the highest version present in `suite` or any suite that
# `suite` Enhances (per the VersionChecks config), by scanning the
# given (suite, version) tuples with apt's version comparison.
2014 @param sv_list: list of (suite, version) tuples to check
2017 @param suite: suite name
# anysuite = the target suite plus every suite it enhances; matches in
# sv_list are compared case-insensitively against this set.
2023 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2024 for (s, v) in sv_list:
2025 if s in [ x.lower() for x in anysuite ]:
# Keep v when it is >= the best candidate seen so far (the
# accumulation/return lines are elided from this view).
2026 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2031 ################################################################################
2033 def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
# For every target suite of the upload, enforce the MustBeNewerThan /
# MustBeOlderThan version rules from the config against the existing
# (suite, version) tuples in sv_list; appends to self.rejects /
# self.warnings and may schedule version propagation to other suites.
2036 @param sv_list: list of (suite, version) tuples to check
2041 @type new_version: string
2042 @param new_version: XXX
2044 Ensure versions are newer than existing packages in target
2045 suites and that cross-suite version checking rules as
2046 set out in the conf file are satisfied.
2051 # Check versions for each target suite
2052 for target_suite in self.pkg.changes["distribution"].keys():
2053 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2054 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2056 # Enforce "must be newer than target suite" even if conffile omits it
2057 if target_suite not in must_be_newer_than:
2058 must_be_newer_than.append(target_suite)
2060 for (suite, existent_version) in sv_list:
2061 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
# vercmp < 1 means new_version <= existent_version; only enforced for
# sourceful uploads.
2063 if suite in must_be_newer_than and sourceful and vercmp < 1:
2064 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
# vercmp > -1 means new_version >= existent_version in a suite it must
# be older than.
2066 if suite in must_be_older_than and vercmp > -1:
2069 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2070 # we really use the other suite, ignoring the conflicting one ...
2071 addsuite = self.pkg.changes["distribution-version"][suite]
2073 add_version = self.get_anyversion(sv_list, addsuite)
2074 target_version = self.get_anyversion(sv_list, target_suite)
2077 # not add_version can only happen if we map to a suite
2078 # that doesn't enhance the suite we're propup'ing from.
2079 # so "propup-ver x a b c; map a d" is a problem only if
2080 # d doesn't enhance a.
2082 # i think we could always propagate in this case, rather
2083 # than complaining. either way, this isn't a REJECT issue
2085 # And - we really should complain to the dorks who configured dak
2086 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2087 self.pkg.changes.setdefault("propdistribution", {})
2088 self.pkg.changes["propdistribution"][addsuite] = 1
2090 elif not target_version:
2091 # not target_version is true when the package is NEW
2092 # we could just stick with the "...old version..." REJECT
2093 # for this, I think.
2094 self.rejects.append("Won't propogate NEW packages.")
2095 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2096 # propagation would be redundant. no need to reject though.
2097 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
2099 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2100 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2102 self.warnings.append("Propogating upload to %s" % (addsuite))
2103 self.pkg.changes.setdefault("propdistribution", {})
2104 self.pkg.changes["propdistribution"][addsuite] = 1
# NOTE(review): every other path here appends to self.rejects (plural);
# self.reject looks like a typo that would raise AttributeError when this
# fallback branch is hit — confirm against the class definition.
2108 self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
2110 ################################################################################
2111 def check_binary_against_db(self, file, session):
# Run the cross-suite version rules for this binary (against the same
# package on its architecture or 'all'), then reject if an identical
# package/version/arch already exists in the archive.
2112 # Ensure version is sane
2113 q = session.query(BinAssociation)
2114 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
2115 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
# sourceful=False: binary uploads only trigger the MustBeOlderThan side
# of the cross-suite checks.
2117 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2118 file, self.pkg.files[file]["version"], sourceful=False)
2120 # Check for any existing copies of the file
2121 q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
2122 q = q.filter_by(version=self.pkg.files[file]["version"])
2123 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])
# Reached when the query above matched (the guarding condition line is
# elided from this view).
2126 self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
2128 ################################################################################
2130 def check_source_against_db(self, file, session):
# Run the cross-suite version rules for the source package named in the
# .dsc against all existing source associations in the database.
2133 source = self.pkg.dsc.get("source")
2134 version = self.pkg.dsc.get("version")
2136 # Ensure version is sane
2137 q = session.query(SrcAssociation)
2138 q = q.join(DBSource).filter(DBSource.source==source)
# sourceful=True: source uploads are also subject to the MustBeNewerThan
# checks in cross_suite_version_check.
2140 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2141 file, version, sourceful=True)
2143 ################################################################################
2144 def check_dsc_against_db(self, file, session):
# Locate every file referenced by the .dsc — in incoming, in the pool, or
# in one of the queue directories — record orig tarball locations in
# self.pkg.orig_files, and verify md5sum/size against the .dsc entry.
2147 @warning: NB: this function can remove entries from the 'files' index [if
2148 the orig tarball is a duplicate of the one in the archive]; if
2149 you're iterating over 'files' and call this function as part of
2150 the loop, be sure to add a check to the top of the loop to
2151 ensure you haven't just tried to dereference the deleted entry.
2156 self.pkg.orig_files = {} # XXX: do we need to clear it?
2157 orig_files = self.pkg.orig_files
2159 # Try and find all files mentioned in the .dsc. This has
2160 # to work harder to cope with the multiple possible
2161 # locations of an .orig.tar.gz.
2162 # The ordering on the select is needed to pick the newest orig
2163 # when it exists in multiple places.
2164 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
# Case 1: the referenced file was shipped in this upload itself.
2166 if self.pkg.files.has_key(dsc_name):
2167 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2168 actual_size = int(self.pkg.files[dsc_name]["size"])
2169 found = "%s in incoming" % (dsc_name)
2171 # Check the file does not already exist in the archive
2172 ql = get_poolfile_like_name(dsc_name, session)
2174 # Strip out anything that isn't '%s' or '/%s$'
2176 if not i.filename.endswith(dsc_name):
2179 # "[dak] has not broken them. [dak] has fixed a
2180 # brokenness. Your crappy hack exploited a bug in
2183 # "(Come on! I thought it was always obvious that
2184 # one just doesn't release different files with
2185 # the same name and version.)"
2186 # -- ajk@ on d-devel@l.d.o
2189 # Ignore exact matches for .orig.tar.gz
2191 if re_is_orig_source.match(dsc_name):
# A byte-identical orig tarball already in the pool is harmless: warn,
# drop it from the upload and remember the pool copy's path instead.
2193 if self.pkg.files.has_key(dsc_name) and \
2194 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2195 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2196 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2197 # TODO: Don't delete the entry, just mark it as not needed
2198 # This would fix the stupidity of changing something we often iterate over
2199 # whilst we're doing it
2200 del self.pkg.files[dsc_name]
2201 if not orig_files.has_key(dsc_name):
2202 orig_files[dsc_name] = {}
2203 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
# Non-identical same-named file in the pool: hard reject.
2207 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
# Case 2: an orig tarball not in this upload — look for it in the pool.
2209 elif re_is_orig_source.match(dsc_name):
2211 ql = get_poolfile_like_name(dsc_name, session)
2213 # Strip out anything that isn't '%s' or '/%s$'
2214 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2216 if not i.filename.endswith(dsc_name):
2220 # Unfortunately, we may get more than one match here if,
2221 # for example, the package was in potato but had an -sa
2222 # upload in woody. So we need to choose the right one.
2224 # default to something sane in case we don't match any or have only one
2229 old_file = os.path.join(i.location.path, i.filename)
2230 old_file_fh = utils.open_file(old_file)
2231 actual_md5 = apt_pkg.md5sum(old_file_fh)
2233 actual_size = os.stat(old_file)[stat.ST_SIZE]
# Prefer the pool copy whose checksum and size match the .dsc entry.
2234 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2237 old_file = os.path.join(i.location.path, i.filename)
2238 old_file_fh = utils.open_file(old_file)
2239 actual_md5 = apt_pkg.md5sum(old_file_fh)
2241 actual_size = os.stat(old_file)[stat.ST_SIZE]
2243 suite_type = x.location.archive_type
2244 # need this for updating dsc_files in install()
2245 dsc_entry["files id"] = x.file_id
2246 # See install() in process-accepted...
2247 if not orig_files.has_key(dsc_name):
2248 orig_files[dsc_name] = {}
2249 orig_files[dsc_name]["id"] = x.file_id
2250 orig_files[dsc_name]["path"] = old_file
2251 orig_files[dsc_name]["location"] = x.location.location_id
2253 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2254 # Not there? Check the queue directories...
2255 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2256 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2258 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2259 if os.path.exists(in_otherdir):
2260 in_otherdir_fh = utils.open_file(in_otherdir)
2261 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2262 in_otherdir_fh.close()
2263 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2265 if not orig_files.has_key(dsc_name):
2266 orig_files[dsc_name] = {}
2267 orig_files[dsc_name]["path"] = in_otherdir
# Exhausted incoming, pool and queue directories: reject.
2270 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
2273 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
# Finally compare whatever copy we settled on against the .dsc entry.
2275 if actual_md5 != dsc_entry["md5sum"]:
2276 self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
2277 if actual_size != int(dsc_entry["size"]):
2278 self.rejects.append("size for %s doesn't match %s." % (found, file))
2280 ################################################################################
2281 # This is used by process-new and process-holding to recheck a changes file
2282 # at the time we're running. It mainly wraps various other internal functions
2283 # and is similar to accepted_checks - these should probably be tidied up
2285 def recheck(self, session):
# Re-run database-dependent checks on each file of the upload: the
# source-exists check for binaries, then the version/overwrite checks.
# Used by process-new / process-holding (see the comment above this def).
2287 for f in self.pkg.files.keys():
2288 # The .orig.tar.gz can disappear out from under us if it's a
2289 # duplicate of one in the archive.
2290 if not self.pkg.files.has_key(f):
2293 entry = self.pkg.files[f]
2295 # Check that the source still exists
2296 if entry["type"] == "deb":
2297 source_version = entry["source version"]
2298 source_package = entry["source package"]
2299 if not self.pkg.changes["architecture"].has_key("source") \
2300 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
# Not in the DB: fall back to looking for the .dsc (without its epoch)
# in the queue directories before rejecting.
2301 source_epochless_version = re_no_epoch.sub('', source_version)
2302 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2304 for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
2305 if cnf.has_key("Dir::Queue::%s" % (q)):
2306 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2309 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2311 # Version and file overwrite checks
2312 if entry["type"] == "deb":
2313 self.check_binary_against_db(f, session)
2314 elif entry["type"] == "dsc":
2315 self.check_source_against_db(f, session)
2316 self.check_dsc_against_db(f, session)
2318 ################################################################################
2319 def accepted_checks(self, overwrite_checks, session):
# Re-run database-dependent checks at install time: source-exists and
# (optionally) overwrite checks per file, then compute suite propagation
# and verify every file is still in the override tables.
2320 # Recheck anything that relies on the database; since that's not
2321 # frozen between accept and our run time when called from p-a.
2323 # overwrite_checks is set to False when installing to stable/oldstable
2328 # Find the .dsc (again)
2330 for f in self.pkg.files.keys():
2331 if self.pkg.files[f]["type"] == "dsc":
2334 for checkfile in self.pkg.files.keys():
2335 # The .orig.tar.gz can disappear out from under us if it's a
2336 # duplicate of one in the archive.
2337 if not self.pkg.files.has_key(checkfile):
2340 entry = self.pkg.files[checkfile]
2342 # Check that the source still exists
2343 if entry["type"] == "deb":
2344 source_version = entry["source version"]
2345 source_package = entry["source package"]
# NOTE(review): unlike recheck(), source_exists() is called here without
# the session argument — confirm its signature accepts that (or whether
# this predates/postdates the session parameter).
2346 if not self.pkg.changes["architecture"].has_key("source") \
2347 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2348 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2350 # Version and file overwrite checks
2351 if overwrite_checks:
2352 if entry["type"] == "deb":
2353 self.check_binary_against_db(checkfile, session)
2354 elif entry["type"] == "dsc":
2355 self.check_source_against_db(checkfile, session)
# The .dsc found in the loop above, not checkfile, is checked here.
2356 self.check_dsc_against_db(dsc_filename, session)
2358 # propagate in the case it is in the override tables:
2359 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2360 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2361 propogate[suite] = 1
2363 nopropogate[suite] = 1
# A suite vetoed by any file (nopropogate) is never added.
2365 for suite in propogate.keys():
2366 if suite in nopropogate:
2368 self.pkg.changes["distribution"][suite] = 1
2370 for checkfile in self.pkg.files.keys():
2371 # Check the package is still in the override tables
2372 for suite in self.pkg.changes["distribution"].keys():
# NOTE(review): 'entry' here is left over from the previous loop (it
# still refers to the last file iterated, not checkfile) — looks like
# it should be self.pkg.files[checkfile]; confirm intent.
2373 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2374 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2376 ################################################################################
2377 # This is not really a reject, but an unaccept, but since a) the code for
2378 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2379 # extremely rare, for now we'll go with whining at our admin folks...
2381 def do_unaccept(self):
# "Unaccept" an upload after acceptance: build the unaccept mail from the
# process-accepted.unaccept template, write it race-safely as the
# <foo>.reason file in the reject queue, and mail it out.
2385 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2386 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2387 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2388 self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
2389 if cnf.has_key("Dinstall::Bcc"):
2390 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2392 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2394 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2396 # Write the rejection email out as the <foo>.reason file
# Strip the ".changes" suffix (8 chars) to derive the .reason name.
2397 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2398 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2400 # If we fail here someone is probably trying to exploit the race
2401 # so let's just raise an exception ...
2402 if os.path.exists(reject_filename):
2403 os.unlink(reject_filename)
# O_CREAT|O_EXCL raises if the file reappears between unlink and open,
# defeating symlink/race attacks on the reject directory.
2405 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2406 os.write(fd, reject_mail_message)
2409 utils.send_mail(reject_mail_message)
# Remove the one-shot substitutions so self.Subst is clean for reuse.
2411 del self.Subst["__REJECTOR_ADDRESS__"]
2412 del self.Subst["__REJECT_MESSAGE__"]
2413 del self.Subst["__CC__"]
2415 ################################################################################
2416 # If any file of an upload has a recent mtime then chances are good
2417 # the file is still being uploaded.
2419 def upload_too_new(self):
2422 # Move back to the original directory to get accurate time stamps
2424 os.chdir(self.pkg.directory)
2425 file_list = self.pkg.files.keys()
2426 file_list.extend(self.pkg.dsc_files.keys())
2427 file_list.append(self.pkg.changes_file)
2430 last_modified = time.time()-os.path.getmtime(f)
2431 if last_modified < int(cnf["Dinstall::SkipTime"]):