"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################
import commands
import errno
import os
import re
import shutil
import sys
import textwrap
import time

import apt_inst
import apt_pkg
import yaml
from types import DictType

import utils

from dak_exceptions import *
from dbconn import *
from regexes import *
from config import Config
from holding import Holding
from summarystats import SummaryStats
from utils import parse_changes
from textutils import fix_maintainer
from binary import Binary

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
        file_type = f["type"]
    else:
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
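
# Illustrative sketch only (hypothetical file entry; a real call needs a live
# SQLAlchemy session from DBConn so the override type can be validated):
#
#   f = { "type": "dsc" }                # as parsed out of a .changes
#   file_type = get_type(f, session)     # -> "dsc"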
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"
            section = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
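
# Illustrative sketch only (hypothetical dict shapes; determine_new needs a
# working database connection for the override lookups above):
#
#   changes = { "suite": { "unstable": 1 } }
#   files = { "dak_1.0-1.dsc": { "type": "dsc", "package": "dak",
#                                "priority": "optional", "section": "misc",
#                                "component": "main" } }
#   new = determine_new(changes, files, warn=0)
#   # new == {} if an override already exists; otherwise new["dak"]
#   # describes the section/priority/component that needs NEW review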
################################################################################

def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
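
# Illustrative sketch only (hypothetical entry): after check_valid() runs, a
# "section id"/"priority id" of -1 marks an entry that must be fixed up in NEW.
#
#   new = { "foo-udeb": { "section": "debian-installer",
#                         "priority": "optional", "type": "udeb" } }
#   check_valid(new)
#   # new["foo-udeb"]["section id"] is -1 only if the section is unknown
#   # (or the udeb/debian-installer sanity checks above failed)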
###############################################################################

def lookup_uid_from_fingerprint(fpr, session):
    uid = None
    uid_name = ""
    # This is a stupid default, but see the comments below
    is_dm = False

    user = get_uid_from_fingerprint(fpr, session)

    if user is not None:
        uid = user.uid
        if user.name is None:
            uid_name = ''
        else:
            uid_name = user.name

        # Check the relevant fingerprint (which we have to have)
        for f in user.fingerprint:
            if f.fingerprint == fpr:
                is_dm = f.keyring.debian_maintainer
                break

    return (uid, uid_name, is_dm)
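
# Illustrative sketch only: the returned triple feeds check_signed_by_key().
#
#   (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(fpr, session)
#   # uid      -> login of the key owner, or None if the key is unknown
#   # uid_name -> the owner's real name, possibly ""
#   # is_dm    -> True if the key lives on a debian-maintainer keyring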
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
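
# Illustrative sketch only, mirroring what check_timestamps() below does:
# collect the members of a deb's tarball whose mtimes fall outside the
# configured window (hypothetical cutoffs).
#
#   future_cutoff = time.time() + 24 * 3600
#   past_cutoff = time.mktime(time.strptime("1984", "%Y"))
#   tar = TarTime(future_cutoff, past_cutoff)
#   apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
#   # tar.future_files / tar.ancient_files now map member name -> mtime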
###############################################################################

class Upload(object):
    """
    Everything that has to do with an upload processed.

    """

    ###########################################################################

    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []
    def package_info(self):
        msg = ''

        if len(self.rejects) > 0:
            msg += "Reject Reasons:\n"
            msg += "\n".join(self.rejects)

        if len(self.warnings) > 0:
            msg += "\nWarnings:\n"
            msg += "\n".join(self.warnings)

        if len(self.notes) > 0:
            msg += "\nNotes:\n"
            msg += "\n".join(self.notes)

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], DictType):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
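
    # Illustrative sketch only: the Subst map is consumed by
    # utils.TemplateSubst, which replaces each __KEY__ placeholder in a mail
    # template (hypothetical extra key shown).
    #
    #   upload.update_subst()
    #   upload.Subst["__SUMMARY__"] = "..."
    #   mail = utils.TemplateSubst(upload.Subst, template_filename)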
    ###########################################################################

    def load_changes(self, filename):
        """
        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes.get("changed-by", ""), msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check there isn't already a changes file of the same name in one
        # of the queue directories.
        base_filename = os.path.basename(filename)
        for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
            if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], base_filename)):
                self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
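
    # Illustrative sketch only: how the multi-value split above reshapes the
    # parsed fields (hypothetical values).
    #
    #   Before: changes["architecture"] == "source i386"
    #   After:  changes["architecture"] == { "source": 1, "i386": 1 }
    #   Before: changes["closes"] == "123456 654321"
    #   After:  changes["closes"] == { "123456": 1, "654321": 1 }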
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        cnf = Config()

        # Handle suite mappings
        for m in cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
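
    # Illustrative sketch only: hypothetical SuiteMappings values in the
    # configuration, one directive per entry, in the shape the parser above
    # expects (mtype followed by its arguments).
    #
    #   "map stable proposed-updates"
    #   "silent-map testing-security testing"
    #   "ignore testing"
    #   "reject experimental-security"
    #   "propup-version stable-security testing"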
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one give in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb or a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        b = Binary(f)
        b.scan_package()
        if len(b.rejects) > 0:
            for j in b.rejects:
                self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if entry["type"] == "orig.tar.gz":
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            # (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]
        archive = utils.where_am_i()

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], archive, session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        archive = utils.where_am_i()
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        # XXX: As far as I can tell, this can no longer happen - see
        # comments by AJ in old revisions - mhy
        # if reprocess is 2 we've already done this and we're checking
        # things again for the new .orig.tar.gz.
        # [Yes, I'm fully aware of how disgusting this is]
        if action and self.reprocess < 2:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

            os.chdir(cwd)

        # Check there isn't already a .changes or .dak file of the same name in
        # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)
        dot_dak_filename = base_filename[:-8] + ".dak"

        for suite in self.pkg.changes["distribution"].keys():
            copychanges = "Suite::%s::CopyChanges" % (suite)
            if cnf.has_key(copychanges) and \
               os.path.exists(os.path.join(cnf[copychanges], base_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (base_filename, cnf[copychanges]))

            copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
            if cnf.has_key(copy_dot_dak) and \
               os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (dot_dak_filename, cnf[copy_dot_dak]))

        self.reprocess = 0
        has_binaries = False
        has_source = False

        session = DBConn().session()

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action is True:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mention source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################

    def check_dsc(self, action=True):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Bumping the version number of the .dsc breaks extraction by stable's
        # dpkg-source.  So let's not do that...
        if self.pkg.dsc["format"] != "1.0":
            self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Check for broken dpkg-dev lossage...
                if field.startswith("ARRAY"):
                    self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
                                        (dsc_filename, field_name.title()))

                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure there is a .tar.gz in the .dsc file
        has_tar = False
        for f in self.pkg.dsc_files.keys():
            m = re_issource.match(f)
            if not m:
                self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
                continue
            ftype = m.group(3)
            if ftype == "orig.tar.gz" or ftype == "tar.gz":
                has_tar = True

        if not has_tar:
            self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                ftype = m.group(3)
                if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig.tar.gz is not a part of the upload, create a symlink to the
        # existing copy.
        if self.pkg.orig_tar_gz:
            dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
            os.symlink(self.pkg.orig_tar_gz, dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # XXX: I'm fairly sure reprocess == 2 can never happen
        #      AJT disabled the is_incoming check years ago - mhy
        #      We should probably scrap or rethink the whole reprocess thing
        # Bail out if:
        #    a) there's no source
        # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
        # or c) the orig.tar.gz is MIA
        if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
           or self.pkg.orig_tar_gz == -1:
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################

    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
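
    # Illustrative sketch only: how the Format: field is normalised into a
    # (major, minor) tuple above (hypothetical values).
    #
    #   "1.8" -> ["1", "8"] -> (1, 8)
    #   "1"   -> ["1"]      -> (1, 0)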
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
    ###########################################################################

    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        cnf = Config()

        future_cutoff = time.time() + int(cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                               % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                               % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    ###########################################################################

    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existant) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe rather than end up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
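
    # Illustrative sketch only: a hypothetical entry in the transitions yaml
    # file, with the keys the loop above reads out of t[...].
    #
    #   apt:
    #     source: apt
    #     new: 0.7.20
    #     rm: "Some Release-Team Member"
    #     reason: "library transition"
    #     packages:
    #       - apt
    #       - python-apt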
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        self.check_transition(session)

        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)

        # match claimed name with actual name:
        if uid is None:
            # This is fundamentally broken but need us to refactor how we get
            # the UIDs/Fingerprints in order for us to fix it properly
            uid, uid_email = self.pkg.changes["fingerprint"], uid
            may_nmu, may_sponsor = 1, 1
            # XXX by default new dds don't have a fingerprint/uid in the db atm,
            #     and can't get one in there if we don't allow nmu/sponsorship
        elif is_dm is False:
            # If is_dm is False, we allow full upload rights
            uid_email = "%s@debian.org" % (uid)
            may_nmu, may_sponsor = 1, 1
        else:
            # Assume limited upload rights unless we've discovered otherwise
            uid_email = uid
            may_nmu, may_sponsor = 0, 0

        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = 0
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = 0
            if uid_name == "": sponsored = 1
        else:
            sponsored = 1
            if ("source" in self.pkg.changes["architecture"] and
                uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        if sponsored and not may_sponsor:
            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))

        if not sponsored and not may_nmu:
            should_reject = True
            highest_sid, highest_version = None, None

            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
            #      It ignores higher versions with the dm_upload_allowed flag set to false
            #      I'm keeping the existing behaviour for now until I've gone back and
            #      checked exactly what the GR says - mhy
            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
                    highest_sid = si.source_id
                    highest_version = si.version

            if highest_sid is None:
                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
            else:
                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
                    if email == uid_email or name == uid_name:
                        should_reject = False
                        break

            if should_reject is True:
                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))

            for b in self.pkg.changes["binary"].keys():
                for suite in self.pkg.changes["distribution"].keys():
                    q = session.query(DBSource)
                    q = q.join(DBBinary).filter_by(package=b)
                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                    for s in q.all():
                        if s.source != self.pkg.changes["source"]:
                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))

            for f in self.pkg.files.keys():
                if self.pkg.files[f].has_key("byhand"):
                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
                if self.pkg.files[f].has_key("new"):
                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))

        session.close()
    ###########################################################################

    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
    ###########################################################################

    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: If set to false, no real action will be done.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.

        """

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        summary += "\n"

        return summary
    ###########################################################################

    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: If set to false, no real action will be done.

        @rtype: string
        @return: Textstring about action taken.

        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
    ###########################################################################

    def accept (self, summary, short_summary, targetdir=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary

        """

        cnf = Config()
        stats = SummaryStats()

        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')

        if targetdir is None:
            targetdir = cnf["Dir::Queue::Accepted"]

        print "Accepting."
        if self.logger:
            self.logger.log(["Accepting changes", self.pkg.changes_file])

        self.pkg.write_dot_dak(targetdir)

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, targetdir)

        for name, entry in sorted(self.pkg.files.items()):
            utils.move(name, targetdir)
            stats.accept_bytes += float(entry["size"])

        stats.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # removed.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        # It is Cnf["Dir::Queue::Accepted"] here, not targetdir!
        # <Ganneff> we do call queue_build too
        # <mhy> well yes, we'd have had to if we were inserting into accepted
        # <Ganneff> now. thats database only.
        # <mhy> urgh, that's going to get messy
        # <Ganneff> so i make the p-n call to it *also* using accepted/
        # <mhy> but then the packages will be in the queue_build table without the files being there
        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
        # <mhy> ah, good point
        # <Ganneff> so it will work out, as unchecked move it over
        # <mhy> that's all completely sick

        # This routine returns None on success or an error on failure
        res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
        if res is not None:
            utils.fubar(res)

    ###########################################################################
1663 def check_override(self):
1665 Checks override entries for validity. Mails "Override disparity" warnings,
1666 if that feature is enabled.
1668 Abandons the check if
1669 - override disparity checks are disabled
1670 - mail sending is disabled
1675 # Abandon the check if:
1676 # a) override disparity checks have been disabled
1677 # b) we're not sending mail
        if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           cnf["Dinstall::Options::No-Mail"]:
            return

        summary = self.pkg.check_override()
        if summary == "":
            return
1687 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1689 self.Subst["__SUMMARY__"] = summary
1690 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
1691 utils.send_mail(mail_message)
1692 del self.Subst["__SUMMARY__"]
1694 ###########################################################################
    def remove(self, dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked
        """
        os.chdir(dir or self.pkg.directory)
        for f in self.pkg.files.keys():
            os.unlink(f)
        os.unlink(self.pkg.changes_file)
1709 ###########################################################################
    def move_to_dir (self, dest, perms=0660, changesperms=0664):
        """
        Move files to dest with certain perms/changesperms
        """
        utils.move(self.pkg.changes_file, dest, perms=changesperms)
1716 for f in self.pkg.files.keys():
1717 utils.move(f, dest, perms=perms)
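        # Example usage (hypothetical destination, assuming an Upload
        # instance and a Config object as elsewhere in this module):
        #
        #   upload.move_to_dir(cnf["Dir::Queue::Done"])
        #
        # which moves the .changes with mode 0664 and everything else 0660.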
1719 ###########################################################################
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary
        """
        cnf = Config()
        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue
            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError:
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
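            # Note: O_CREAT|O_EXCL is what makes the claim atomic -- if two
            # dak processes race for the same reject filename, exactly one
            # os.open() succeeds and the loser takes the EEXIST path above.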
1769 ###########################################################################
    def do_reject (self, manual=0, reject_message="", note=""):
        """
        Reject an upload. If called without a reject message and C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection
        @type reject_message: string
        @param reject_message: A reject message
        @return: 0 on success, 1 if the rejection was abandoned
        """
        cnf = Config()
1784 # If we weren't given a manual rejection message, spawn an
1785 # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(note) > 0:
                for line in note:
                    temp_file.write(line)
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)
1816 print "Rejecting.\n"
1820 reason_filename = self.pkg.changes_file[:-8] + ".reason"
1821 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
1823 # Move all the files into the reject directory
1824 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
1825 self.force_reject(reject_files)
1827 # If we fail here someone is probably trying to exploit the race
1828 # so let's just raise an exception ...
1829 if os.path.exists(reason_filename):
1830 os.unlink(reason_filename)
1831 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
1833 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)
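        # Note the asymmetry: for automatic rejections only the bare
        # reject_message lands in the .reason file, while for manual ones
        # the fully templated mail (including the rejector's address) is
        # written out.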
        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        self.logger.log(["rejected", self.pkg.changes_file])

        return 0
1866 ################################################################################
    def in_override_p(self, package, component, suite, binary_type, file, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: component name

        @type suite: string
        @param suite: suite name

        @type binary_type: string
        @param binary_type: type of the package

        @type file: string
        @param file: filename we check

        @return: the database result. But no one cares anyway.
        """
        cnf = Config()
        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type
1897 # Override suite name; used for example with proposed-updates
1898 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
1899 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
1901 result = get_override(package, suite, component, file_type, session)
1903 # If checking for a source package fall back on the binary override type
1904 if file_type == "dsc" and len(result) < 1:
1905 result = get_override(package, suite, component, ['deb', 'udeb'], session)
        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[file]["override section"] = result.section.section
            self.pkg.files[file]["override priority"] = result.priority.priority

        return result
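        # A minimal call sketch (hypothetical values):
        #
        #   result = self.in_override_p("dak", "main", "unstable", "deb",
        #                               "dak_1.0-1_all.deb", session)
        #
        # An empty result means the package has no override entry yet,
        # i.e. it is NEW.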
1916 ################################################################################
    def get_anyversion(self, sv_list, suite):
        """
        @param sv_list: list of (suite, version) tuples to check
        @param suite: suite name
        @return: highest version found for C{suite} or any suite enhancing it
        """
        Cnf = Config()
        anyversion = None
        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v
        return anyversion
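        # For example (hypothetical data): with sv_list
        # [("testing", "1.0-1"), ("unstable", "1.1-1")] and no Enhances
        # configured for "testing", get_anyversion(sv_list, "testing")
        # returns "1.0-1".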
1937 ################################################################################
    def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
        """
        @param sv_list: list of (suite, version) tuples to check
        @param file: filename of the uploaded package
        @type new_version: string
        @param new_version: version of the uploaded package

        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.
        """

        cnf = Config()
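        # The rules come from the dak configuration; a hypothetical
        # apt-style snippet might look like:
        #
        #   Suite::unstable::VersionChecks::MustBeNewerThan { "stable"; "testing"; };
        #   Suite::stable::VersionChecks::MustBeOlderThan { "testing"; "unstable"; };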
1957 # Check versions for each target suite
1958 for target_suite in self.pkg.changes["distribution"].keys():
1959 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
1960 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
1962 # Enforce "must be newer than target suite" even if conffile omits it
1963 if target_suite not in must_be_newer_than:
1964 must_be_newer_than.append(target_suite)
1966 for (suite, existent_version) in sv_list:
1967 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
1969 if suite in must_be_newer_than and sourceful and vercmp < 1:
1970 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
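                # Worked example (hypothetical versions): stable has foo 1.0,
                # unstable has foo 1.1, and a security upload brings foo 1.2
                # to stable.  That violates "stable must be older than
                # unstable", but with a distribution-version mapping of
                # stable -> unstable the upload is propagated to unstable as
                # well instead of being rejected.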
2016 ################################################################################
2017 def check_binary_against_db(self, file, session):
2018 # Ensure version is sane
2019 q = session.query(BinAssociation)
2020 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
2021 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))
2023 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2024 file, self.pkg.files[file]["version"], sourceful=False)
2026 # Check for any existing copies of the file
2027 q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
2028 q = q.filter_by(version=self.pkg.files[file]["version"])
2029 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])
        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
2034 ################################################################################
2036 def check_source_against_db(self, file, session):
2039 source = self.pkg.dsc.get("source")
2040 version = self.pkg.dsc.get("version")
2042 # Ensure version is sane
2043 q = session.query(SrcAssociation)
2044 q = q.join(DBSource).filter(DBSource.source==source)
2046 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2047 file, version, sourceful=True)
2049 ################################################################################
    def check_dsc_against_db(self, file, session):
        """
        @warning: NB: this function can remove entries from the 'files' index [if
        the .orig.tar.gz is a duplicate of the one in the archive]; if
        you're iterating over 'files' and call this function as part of
        the loop, be sure to add a check to the top of the loop to
        ensure you haven't just tried to dereference the deleted entry.
        """
        Cnf = Config()
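        # Callers typically guard their loops accordingly, roughly:
        #
        #   for f in self.pkg.files.keys():
        #       if not self.pkg.files.has_key(f):
        #           continue   # entry was deleted by check_dsc_against_db()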
2062 self.pkg.orig_tar_gz = None
2064 # Try and find all files mentioned in the .dsc. This has
2065 # to work harder to cope with the multiple possible
2066 # locations of an .orig.tar.gz.
2067 # The ordering on the select is needed to pick the newest orig
2068 # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            found = None
            if self.pkg.files.has_key(dsc_name):
2072 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2073 actual_size = int(self.pkg.files[dsc_name]["size"])
2074 found = "%s in incoming" % (dsc_name)
2076 # Check the file does not already exist in the archive
2077 ql = get_poolfile_like_name(dsc_name, session)
                # Strip out anything that isn't '%s' or '/%s$'
                for i in ql[:]:  # iterate over a copy since we mutate ql
                    if not i.filename.endswith(dsc_name):
                        ql.remove(i)

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall.
2088 # "(Come on! I thought it was always obvious that
2089 # one just doesn't release different files with
2090 # the same name and version.)"
2091 # -- ajk@ on d-devel@l.d.o
                # Ignore exact matches for .orig.tar.gz
                match = 0
                if dsc_name.endswith(".orig.tar.gz"):
                    for i in ql:
                        if self.pkg.files.has_key(dsc_name) and \
                           int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                           self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                            self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                            # TODO: Don't delete the entry, just mark it as not needed
                            # This would fix the stupidity of changing something we often iterate over
                            # whilst we're doing it
                            del self.pkg.files[dsc_name]
                            self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
                            match = 1

                if not match:
                    self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2112 elif dsc_name.endswith(".orig.tar.gz"):
2114 ql = get_poolfile_like_name(dsc_name, session)
2116 # Strip out anything that isn't '%s' or '/%s$'
2117 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                for i in ql[:]:  # iterate over a copy since we mutate ql
                    if not i.filename.endswith(dsc_name):
                        ql.remove(i)
                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    self.pkg.orig_tar_id = x.file_id
                    self.pkg.orig_tar_gz = old_file
                    self.pkg.orig_tar_location = x.location.location_id
                else:
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                            continue
                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            self.pkg.orig_tar_gz = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
                        self.pkg.orig_tar_gz = -1
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
                continue
2175 if actual_md5 != dsc_entry["md5sum"]:
2176 self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
2177 if actual_size != int(dsc_entry["size"]):
2178 self.rejects.append("size for %s doesn't match %s." % (found, file))
2180 ################################################################################
2181 def accepted_checks(self, overwrite_checks, session):
2182 # Recheck anything that relies on the database; since that's not
2183 # frozen between accept and our run time when called from p-a.
        # overwrite_checks is set to False when installing to stable/oldstable
        propogate = {}
        nopropogate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue
2202 entry = self.pkg.files[checkfile]
2204 # Check that the source still exists
2205 if entry["type"] == "deb":
2206 source_version = entry["source version"]
2207 source_package = entry["source package"]
2208 if not self.pkg.changes["architecture"].has_key("source") \
2209 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2210 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2212 # Version and file overwrite checks
2213 if overwrite_checks:
2214 if entry["type"] == "deb":
2215 self.check_binary_against_db(checkfile, session)
2216 elif entry["type"] == "dsc":
2217 self.check_source_against_db(checkfile, session)
2218 self.check_dsc_against_db(dsc_filename, session)
2220 # propogate in the case it is in the override tables:
2221 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2222 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propogate[suite] = 1
                else:
                    nopropogate[suite] = 1
2227 for suite in propogate.keys():
            if suite in nopropogate:
                continue
            self.pkg.changes["distribution"][suite] = 1
        for checkfile in self.pkg.files.keys():
            # Look the entry up afresh; 'entry' from the loop above would be stale here
            entry = self.pkg.files[checkfile]
            # Check the package is still in the override tables
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2238 ################################################################################
2239 # This is not really a reject, but an unaccept, but since a) the code for
2240 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2241 # extremely rare, for now we'll go with whining at our admin folks...
    def do_unaccept(self):
        cnf = Config()

        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2247 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2248 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2249 self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
2250 if cnf.has_key("Dinstall::Bcc"):
2251 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2253 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2255 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2257 # Write the rejection email out as the <foo>.reason file
2258 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2259 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2261 # If we fail here someone is probably trying to exploit the race
2262 # so let's just raise an exception ...
2263 if os.path.exists(reject_filename):
2264 os.unlink(reject_filename)
2266 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        os.write(fd, reject_mail_message)
        os.close(fd)

        utils.send_mail(reject_mail_message)
2272 del self.Subst["__REJECTOR_ADDRESS__"]
2273 del self.Subst["__REJECT_MESSAGE__"]
2274 del self.Subst["__CC__"]
2276 ################################################################################
2277 # If any file of an upload has a recent mtime then chances are good
2278 # the file is still being uploaded.
    def upload_too_new(self):
        cnf = Config()
        too_new = False
        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            last_modified = time.time() - os.path.getmtime(f)
            if last_modified < int(cnf["Dinstall::SkipTime"]):
                too_new = True
                break
        os.chdir(cwd)
        return too_new
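    # A minimal sketch of the intended call pattern (hypothetical caller):
    #
    #   if upload.upload_too_new():
    #       continue  # skip it; dak will look at the upload again next run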