"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

###############################################################################
import commands, errno, os, re, shutil, sys, textwrap, time
import apt_inst
import apt_pkg
import yaml
from types import *

import utils
from dbconn import *
from dak_exceptions import *
from regexes import *
from config import Config
from holding import Holding
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))

    return file_type
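
# A minimal usage sketch (not part of the original module; the file entry
# below is a hypothetical dict, but the keys match those used above):
#
#   session = DBConn().session()
#   entry = {"type": "deb", "dbtype": "deb"}
#   file_type = get_type(entry, session)   # -> "deb"
#
# Entries without a "dbtype" key fall back to matching f["type"] against
# re_source_ext, so a .dsc entry resolves to "dsc".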
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.
    """
    new = {}

    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
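
# Illustrative sketch of the result shape (values are hypothetical): for an
# upload introducing a previously unknown package "foo", determine_new()
# returns something like
#
#   { "foo": { "priority": "optional", "section": "utils", "type": "deb",
#              "component": "main", "files": ["foo_1.0-1_amd64.deb"] } }
#
# Packages that already have an override entry in every target suite are
# dropped from the dict before it is returned.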
################################################################################

def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages cannot be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.
    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
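
# Sketch of the sanity rules above (the dict below is hypothetical): a udeb
# outside a debian-installer section, or a non-udeb inside one, gets
# "section id" -1; a dsc without source priority (or vice versa) gets
# "priority id" -1, marking the override as invalid for the NEW tools.
#
#   new = {"foo-udeb": {"section": "debian-installer", "priority": "optional",
#                       "type": "udeb"}}
#   check_valid(new)   # leaves "section id"/"priority id" >= 0 if valid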
###############################################################################

def lookup_uid_from_fingerprint(fpr, session):
    uid = None
    uid_name = ""
    # This is a stupid default, but see the comments below
    is_dm = False

    user = get_uid_from_fingerprint(fpr, session)

    if user is not None:
        uid = user.uid
        if user.name is None:
            uid_name = ''
        else:
            uid_name = user.name

        # Check the relevant fingerprint (which we have to have)
        for f in user.fingerprint:
            if f.fingerprint == fpr:
                is_dm = f.keyring.debian_maintainer
                break

    return (uid, uid_name, is_dm)
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
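
# Usage sketch (mirrors Upload.check_timestamps() below; the filename is
# hypothetical): apt_inst.debExtract calls the bound callback once per tar
# member, letting TarTime collect files whose mtimes fall outside the
# [past_cutoff, future_cutoff] window.
#
#   tar = TarTime(time.time() + 86400, time.mktime(time.strptime("1984", "%Y")))
#   apt_inst.debExtract(utils.open_file("foo_1.0-1_amd64.deb"), tar.callback, "data.tar.gz")
#   print tar.future_files, tar.ancient_files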
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.
    """

    ###########################################################################
    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []
    def package_info(self):
        msg = ''

        if len(self.rejects) > 0:
            msg += "Reject Reasons:\n"
            msg += "\n".join(self.rejects)

        if len(self.warnings) > 0:
            msg += "Warnings:\n"
            msg += "\n".join(self.warnings)

        if len(self.notes) > 0:
            msg += "Notes:\n"
            msg += "\n".join(self.notes)

        return msg
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], DictType):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
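
    # Sketch of how the substitution map is consumed (the template name is
    # illustrative): utils.TemplateSubst replaces __KEY__ markers in a mail
    # template with the values collected above, as the accept()/announce()
    # methods below do.
    #
    #   self.update_subst()
    #   template = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
    #   mail_message = utils.TemplateSubst(self.Subst, template)
    #   utils.send_mail(mail_message)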
    ###########################################################################
    def load_changes(self, filename):
        """
        @rtype: boolean
        @return: whether the changes file was valid or not. We may want to
        reject even if this is True (see what gets put in self.rejects).
        This is simply to prevent us even trying things later which will
        fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes.get("changed-by", ""), msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check there isn't already a changes file of the same name in one
        # of the queue directories.
        base_filename = os.path.basename(filename)
        for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
            if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
                self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
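
    # Sketch of the multi-value split above (input is hypothetical): a field
    # like "Architecture: source amd64" ends up as
    #
    #   self.pkg.changes["architecture"] == {"source": 1, "amd64": 1}
    #
    # which is why the rest of this module tests membership with has_key()
    # rather than with substring matches.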
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
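
    # Illustrative SuiteMappings entries as they might appear in dak.conf
    # (the exact values are examples, not the live configuration):
    #
    #   SuiteMappings
    #   {
    #     "map stable proposed-updates";
    #     "map-unreleased stable unstable";
    #     "propup-version stable-security testing testing-proposed-updates";
    #   };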
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
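
    # Sketch of the filename check above (the file is hypothetical):
    # re_isadeb splits a conforming name into (package, version, architecture),
    #
    #   m = re_isadeb.match("foo_1.0-1_amd64.deb")
    #   m.group(1), m.group(2), m.group(3)   # -> ("foo", "1.0-1", "amd64")
    #
    # and each part must agree with the control file (after stripping any
    # epoch from the version).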
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]
        archive = utils.where_am_i()

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], archive, session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
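
    # Sketch of the pool layout used above (the package name is
    # hypothetical): for an ordinary source package utils.poolify returns the
    # conventional pool subdirectory, so the existence check runs against e.g.
    #
    #   utils.poolify("foo", "main")   # -> "pool/main/f/foo/"
    #
    # (lib* packages use a four-character prefix instead of a single letter).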
    def check_files(self, action=True):
        archive = utils.where_am_i()
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        # XXX: As far as I can tell, this can no longer happen - see
        #      comments by AJ in old revisions - mhy
        # if reprocess is 2 we've already done this and we're checking
        # things again for the new .orig.tar.gz.
        # [Yes, I'm fully aware of how disgusting this is]
        if action and self.reprocess < 2:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

            os.chdir(cwd)

        # Check there isn't already a .changes or .dak file of the same name in
        # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)
        dot_dak_filename = base_filename[:-8] + ".dak"

        for suite in self.pkg.changes["distribution"].keys():
            copychanges = "Suite::%s::CopyChanges" % (suite)
            if cnf.has_key(copychanges) and \
               os.path.exists(os.path.join(cnf[copychanges], base_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (base_filename, cnf[copychanges]))

            copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
            if cnf.has_key(copy_dot_dak) and \
               os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (dot_dak_filename, cnf[copy_dot_dak]))

        has_binaries = False
        has_source = False

        session = DBConn().session()

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package? Assume byhand...
            else:
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found, although the Architecture field in the changes file mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True
    ###########################################################################

    def ensure_all_source_exists(self, source_dir, dest_dir=None):
        """
        Ensure that dest_dir contains all the orig tarballs for the specified
        changes. If it does not, symlink them into place.

        If dest_dir is None, populate the current directory.
        """

        if dest_dir is None:
            dest_dir = os.getcwd()

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if not m: continue
            src = os.path.join(source_dir, f)
            # If a file is missing for whatever reason, give up.
            if not os.path.exists(src):
                return

            if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
               self.pkg.orig_files[f].has_key("path"):
                continue
            dest = os.path.join(dest_dir, f)
            os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(dest_dir, os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        self.ensure_all_source_exists(source_dir)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))

    def check_source(self):
        # XXX: I'm fairly sure reprocess == 2 can never happen
        #      AJT disabled the is_incoming check years ago - mhy
        #      We should probably scrap or rethink the whole reprocess thing
        # Bail out if:
        #    a) there's no source
        # or b) reprocess is 2 - we will do this check next time when orig
        #       tarball is in 'files'
        # or c) the orig files are MIA
        if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
           or len(self.pkg.orig_files) == 0:
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################

    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict. This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
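
    # Sketch of the hash table driving ensure_hashes() (the entries below are
    # illustrative; see utils.known_hashes for the authoritative list): each
    # element is a (name, function, changes-format-version) tuple, e.g.
    #
    #   known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
    #                   ("sha256", apt_pkg.sha256sum, (1, 8))]
    #
    # For .changes files older than the given format version the hash is
    # computed locally instead of being required in the file.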
    ###########################################################################

    def check_lintian(self):
        # Only check some distributions
        valid_dist = False
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                valid_dist = True
                break

        if not valid_dist:
            return

        self.ensure_all_source_exists()

        cnf = Config()
        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            # We don't have a tagfile, so just don't do anything.
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()
        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Now setup the input file for lintian. lintian wants "one tag per line" only,
        # so put it together like it. We put all types of tags in one file and then sort
        # through lintian's output later to see if it's a fatal tag we detected, or not.
        # So we only run lintian once on all tags, even if we might reject on some, but not
        # on others.
        # Additionally build up a set of tags
        tags = set()
        (fd, temp_filename) = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tagtype in lintiantags:
            for tag in lintiantags[tagtype]:
                temptagfile.write("%s\n" % tag)
                tags.add(tag)
        temptagfile.close()

        # So now we should look at running lintian at the .changes file, capturing output
        # to then parse it.
        command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
        (result, output) = commands.getstatusoutput(command)

        # We are done with lintian, remove our tempfile
        os.unlink(temp_filename)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))

        if len(output) == 0:
            return

        def log(*txt):
            if self.logger:
                args = [self.pkg.changes_file, "check_lintian"]
                args.extend(txt)
                self.logger.log(args)

        # We have output of lintian, this package isn't clean. Lets parse it and see if we
        # are having a victim for a reject.
        # W: tzdata: binary-without-manpage usr/sbin/tzconfig
        for line in output.split('\n'):
            m = re_parse_lintian.match(line)
            if m is None:
                continue

            etype = m.group(1)
            epackage = m.group(2)
            etag = m.group(3)
            etext = m.group(4)

            # So lets check if we know the tag at all.
            if etag not in tags:
                continue

            if etype == 'O':
                # We know it and it is overridden. Check that override is allowed.
                if etag in lintiantags['warning']:
                    # The tag is overridden, and it is allowed to be overridden.
                    # Don't add a reject message.
                    pass
                elif etag in lintiantags['error']:
                    # The tag is overridden - but is not allowed to be
                    self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
                    log("overridden tag is overridden", etag)
            else:
                # Tag is known, it is not overridden, direct reject.
                self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
                log("auto rejecting", etag)
                # Now tell if they *might* override it.
                if etag in lintiantags['warning']:
                    self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
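
    # Illustrative shape of the Dinstall::LintianTags YAML file (the tag
    # names are examples): tags listed under "error" trigger unconditional
    # rejects, tags under "warning" may be overridden by the maintainer.
    #
    #   lintian:
    #     error:
    #       - statically-linked-binary
    #     warning:
    #       - binary-without-manpage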
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
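
    # Sketch of the cutoff computation above (the config values are
    # illustrative): with Dinstall::FutureTimeTravelGrace 28800 (8 hours)
    # and Dinstall::PastCutoffYear "1984", any tar member timestamped more
    # than 8 hours ahead of now, or before 1984-01-01, causes a reject.
    #
    #   future_cutoff = time.time() + 28800
    #   past_cutoff = time.mktime(time.strptime("1984", "%Y"))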
    ###########################################################################
    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existent) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe rather than end up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s). This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance. You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
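
    # Illustrative transitions file as consumed above (package, version and
    # person names are made up; the keys match those read by the code):
    #
    #   apt-transition:
    #     source: apt
    #     new: 0.7.21
    #     rm: Joe Release
    #     reason: "apt ABI bump"
    #     packages:
    #       - apt
    #       - python-apt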
    ###########################################################################
    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        self.check_transition(session)

        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)

        # match claimed name with actual name:
        if uid is None:
            # This is fundamentally broken but need us to refactor how we get
            # the UIDs/Fingerprints in order for us to fix it properly
            uid, uid_email = self.pkg.changes["fingerprint"], uid
            may_nmu, may_sponsor = 1, 1
            # XXX by default new dds don't have a fingerprint/uid in the db atm,
            #     and can't get one in there if we don't allow nmu/sponsorship
        elif is_dm is False:
            # If is_dm is False, we allow full upload rights
            uid_email = "%s@debian.org" % (uid)
            may_nmu, may_sponsor = 1, 1
        else:
            # Assume limited upload rights unless we've discovered otherwise
            uid_email = uid
            may_nmu, may_sponsor = 0, 0

        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = 0
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = 0
            if uid_name == "": sponsored = 1
        else:
            sponsored = 1
            if ("source" in self.pkg.changes["architecture"] and
                uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        if sponsored and not may_sponsor:
            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))

        if not sponsored and not may_nmu:
            should_reject = True
            highest_sid, highest_version = None, None

            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
            #      It ignores higher versions with the dm_upload_allowed flag set to false
            #      I'm keeping the existing behaviour for now until I've gone back and
            #      checked exactly what the GR says - mhy
            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
                    highest_sid = si.source_id
                    highest_version = si.version

            if highest_sid is None:
                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
            else:
                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
                    if email == uid_email or name == uid_name:
                        should_reject = False
                        break

            if should_reject is True:
                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))

            for b in self.pkg.changes["binary"].keys():
                for suite in self.pkg.changes["distribution"].keys():
                    q = session.query(DBSource)
                    q = q.join(DBBinary).filter_by(package=b)
                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                    for s in q.all():
                        if s.source != self.pkg.changes["source"]:
                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))

            for f in self.pkg.files.keys():
                if self.pkg.files[f].has_key("byhand"):
                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
                if self.pkg.files[f].has_key("new"):
                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))

        session.close()
    ###########################################################################
    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
    ###########################################################################

    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: If set to false, no real action will be done.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.
        """

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution. It may have dependencies on other unreleased software,
or other instabilities. Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        summary += "\n"

        return summary
    ###########################################################################

    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: If set to false, no real action will be done.

        @rtype: string
        @return: Textstring about action taken.
        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
    ###########################################################################

    def accept (self, summary, short_summary, targetdir=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        L{queue_build}.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary

        """

        cnf = Config()
        stats = SummaryStats()

        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')

        if targetdir is None:
            targetdir = cnf["Dir::Queue::Accepted"]

        if self.logger:
            self.logger.log(["Accepting changes", self.pkg.changes_file])

        self.pkg.write_dot_dak(targetdir)

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, targetdir)

        for name, entry in sorted(self.pkg.files.items()):
            utils.move(name, targetdir)
            stats.accept_bytes += float(entry["size"])

        stats.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)

        # It is Cnf["Dir::Queue::Accepted"] here, not targetdir!
        # <Ganneff> we do call queue_build too
        # <mhy> well yes, we'd have had to if we were inserting into accepted
        # <Ganneff> now. thats database only.
        # <mhy> urgh, that's going to get messy
        # <Ganneff> so i make the p-n call to it *also* using accepted/
        # <mhy> but then the packages will be in the queue_build table without the files being there
        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
        # <mhy> ah, good point
        # <Ganneff> so it will work out, as unchecked move it over
        # <mhy> that's all completely sick

        # This routine returns None on success or an error on failure
        res = get_or_set_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
        if res:
            utils.fubar(res)
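    # Each .debinfo line written above is space-separated:
    # package, version, architecture, source package, source version,
    # e.g. (made-up data):
    #
    #   dak 1.0-1 all dak 1.0-1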
    ###########################################################################

    def check_override(self):
        """
        Checks override entries for validity. Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
            - override disparity checks are disabled
            - mail sending is disabled
        """

        cnf = Config()

        # Abandon the check if:
        # a) override disparity checks have been disabled
        # b) we're not sending mail
        if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           cnf["Dinstall::Options::No-Mail"]:
            return

        summary = self.pkg.check_override()
        if summary == "":
            return

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]
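    # The check is gated by configuration like (hypothetical value):
    #
    #   Dinstall::OverrideDisparityCheck "true";
    #
    # and is skipped entirely when mail sending is disabled via
    # Dinstall::Options::No-Mail.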
    ###########################################################################

    def remove(self, dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked
        """
        if dir is None:
            os.chdir(self.pkg.directory)
        else:
            os.chdir(dir)

        for f in self.pkg.files.keys():
            os.unlink(f)
        os.unlink(self.pkg.changes_file)
    def move_to_dir (self, dest, perms=0660, changesperms=0664):
        """
        Move files to dest with certain perms/changesperms
        """
        utils.move(self.pkg.changes_file, dest, perms=changesperms)
        for f in self.pkg.files.keys():
            utils.move(f, dest, perms=perms)
    ###########################################################################

    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: dict
        @param reject_files: file dictionary
        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise
            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
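    # Sketch of the claim-then-move dance above (assumed layout): if
    # REJECT/foo.deb already exists, it is moved to MORGUE/<MorgueReject>/foo.deb
    # (utils.find_next_free appends a suffix if that name is taken too),
    # the reject name is re-claimed with O_CREAT|O_EXCL, and only then is
    # the incoming foo.deb moved over it.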
    ###########################################################################

    def do_reject (self, manual=0, reject_message="", note=""):
        """
        Reject an upload. If called with C{manual} true and without a reject
        message, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @rtype: int
        @return: 0 on rejection, 1 if a manual rejection was abandoned

        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(note) > 0:
                for line in note:
                    temp_file.write(line)
            temp_file.close()
            editor = os.environ.get("EDITOR", "vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message, "  ", include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
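    # Typical invocations (illustrative only; the note argument is any
    # iterable of lines, e.g. queue comments):
    #
    #   u.do_reject(manual=0, reject_message="\n".join(u.rejects))
    #   u.do_reject(manual=1, note=["rejected by ftpmaster\n"])  # spawns $EDITOR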
    ################################################################################

    def in_override_p(self, package, component, suite, binary_type, file, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: component the package is in

        @type suite: string
        @param suite: suite name the upload targets

        @type binary_type: string
        @param binary_type: type of the package

        @type file: string
        @param file: filename we check

        @return: the database result. But no one cares anyway.

        """

        cnf = Config()

        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = cnf["Suite::%s::OverrideSuite" % (suite)]

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[file]["override section"] = result.section.section
            self.pkg.files[file]["override priority"] = result.priority.priority

        return result
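    # Example (hypothetical configuration): a package targeted at
    # proposed-updates with
    #
    #   Suite::proposed-updates::OverrideSuite "stable";
    #
    # is looked up in the stable override table instead.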
    ################################################################################

    def get_anyversion(self, sv_list, suite):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type suite: string
        @param suite: suite name

        @rtype: string or None
        @return: highest version found in sv_list for C{suite} or any suite
                 it enhances, or None if there is no such version
        """
        Cnf = Config()
        anyversion = None
        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v

        return anyversion
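    # Worked example (assumed data): with
    #
    #   sv_list = [("unstable", "1.2-1"), ("experimental", "1.3-1")]
    #
    # and no Enhances entries configured for unstable,
    # get_anyversion(sv_list, "unstable") returns "1.2-1" - only suites in
    # anysuite are considered, and the highest version among them wins.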
    ################################################################################

    def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type file: string
        @param file: filename of the package being checked

        @type new_version: string
        @param new_version: version of the package being checked
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
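    # The per-suite rules above come from configuration such as
    # (hypothetical example):
    #
    #   Suite::testing::VersionChecks::MustBeNewerThan { "stable"; };
    #   Suite::testing::VersionChecks::MustBeOlderThan { "unstable"; "experimental"; };
    #
    # plus the implicit "must be newer than the target suite itself"
    # enforced in the code.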
    ################################################################################

    def check_binary_against_db(self, file, session):
        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))

        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       file, self.pkg.files[file]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
        q = q.filter_by(version=self.pkg.files[file]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
    ################################################################################

    def check_source_against_db(self, file, session):
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        q = session.query(SrcAssociation)
        q = q.join(DBSource).filter(DBSource.source==source)

        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                       file, version, sourceful=True)
    ################################################################################

    def check_dsc_against_db(self, file, session):
        """

        @warning: NB: this function can remove entries from the 'files' index [if
        the orig tarball is a duplicate of the one in the archive]; if
        you're iterating over 'files' and call this function as part of
        the loop, be sure to add a check to the top of the loop to
        ensure you haven't just tried to dereference the deleted entry.

        """

        Cnf = Config()
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            found = None
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall.
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if len(ql) > 0:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if re_is_orig_source.match(dsc_name):
                        for i in ql:
                            if self.pkg.files.has_key(dsc_name) and \
                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                                # TODO: Don't delete the entry, just mark it as not needed
                                # This would fix the stupidity of changing something we often iterate over
                                # whilst we're doing it
                                del self.pkg.files[dsc_name]
                                if not orig_files.has_key(dsc_name):
                                    orig_files[dsc_name] = {}
                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                match = 1

                    if not match:
                        self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

            elif re_is_orig_source.match(dsc_name):
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]

                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id
                else:
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                            continue
                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
                continue

            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, file))
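    # Search order for an orig tarball sketched above:
    #   1. the upload itself (self.pkg.files)
    #   2. the pool (get_poolfile_like_name), ignoring exact duplicates
    #   3. the queue directories (Accepted, New, Byhand, ...)
    # Anything still unfound is rejected; whatever is found has its md5sum
    # and size checked against the .dsc entry.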
    ################################################################################

    def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database; since that's not
        # frozen between accept and our run time when called from p-a.

        # overwrite_checks is set to False when installing to stable/oldstable

        propagate = {}
        nopropagate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # propagate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1

        for suite in propagate.keys():
            if suite in nopropagate:
                continue
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            # Check the package is still in the override tables
            entry = self.pkg.files[checkfile]
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
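    # Propagation sketch (assumed data): if the upload carries
    # changes["propdistribution"] = {"proposed-updates": 1} and every file
    # passes the override check there, proposed-updates is added to
    # changes["distribution"]; a single failing file vetoes the suite.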
    ################################################################################
    # This is not really a reject, but an unaccept, but since a) the code for
    # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
    # extremely rare, for now we'll go with whining at our admin folks...

    def do_unaccept(self):
        cnf = Config()

        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
        if cnf.has_key("Dinstall::Bcc"):
            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")

        reject_mail_message = utils.TemplateSubst(self.Subst, template)

        # Write the rejection email out as the <foo>.reason file
        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename)

        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        os.write(fd, reject_mail_message)
        os.close(fd)

        utils.send_mail(reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__REJECT_MESSAGE__"]
        del self.Subst["__CC__"]
    ################################################################################
    # If any file of an upload has a recent mtime then chances are good
    # the file is still being uploaded.

    def upload_too_new(self):
        cnf = Config()
        too_new = False
        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time()-os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except:
                pass

        os.chdir(cwd)
        return too_new
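    # Dinstall::SkipTime is in seconds, e.g. (hypothetical value):
    #
    #   Dinstall::SkipTime "300";
    #
    # meaning any file modified within the last five minutes marks the
    # upload as still in progress.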