"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################
import commands
import errno
import os
import re
import shutil
import stat
import sys
import time
import textwrap
import apt_inst
import apt_pkg
import utils
import yaml
from types import *

from dak_exceptions import *
from changes import *
from regexes import *
from config import Config
from holding import Holding
from dbconn import *
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object
    """
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
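# A minimal usage sketch (hypothetical file entry; assumes the regexes and a
# live database connection used throughout this module):
#
#   session = DBConn().session()
#   entry = {"type": "deb", "dbtype": "deb"}
#   file_type = get_type(entry, session)   # -> "deb"; unknown types fubar() out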
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @param warn: Warn if overrides are added for (old)stable

    @return: dictionary of NEW components.
    """
    new = {}
    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
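# Sketch of the returned structure for a hypothetical upload introducing one
# new binary package (all values made up):
#
#   new = determine_new(u.pkg.changes, u.pkg.files)
#   # new == { "foo": { "priority": "optional", "section": "utils",
#   #                   "type": "deb", "component": "main",
#   #                   "files": ["foo_1.0-1_i386.deb"] } }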
################################################################################

def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
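# Invalid entries end up flagged with -1 ids, so a caller can collect them
# afterwards; a sketch:
#
#   check_valid(new)
#   broken = [p for p in new.keys()
#             if new[p]["section id"] == -1 or new[p]["priority id"] == -1]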
###############################################################################

def lookup_uid_from_fingerprint(fpr, session):
    uid = None
    uid_name = ""
    # This is a stupid default, but see the comments below
    is_dm = False

    user = get_uid_from_fingerprint(fpr, session)

    if user is not None:
        uid = user.uid
        if user.name is None:
            uid_name = ''
        else:
            uid_name = user.name

        # Check the relevant fingerprint (which we have to have)
        for f in user.fingerprint:
            if f.fingerprint == fpr:
                is_dm = f.keyring.debian_maintainer
                break

    return (uid, uid_name, is_dm)
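# Usage sketch (hypothetical fingerprint string):
#
#   (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(fpr, session)
#   # uid is None for unknown fingerprints; is_dm is only True when the
#   # matching fingerprint lives on a debian-maintainer keyring.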
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
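# Sketch of how check_timestamps() (below) drives this class: apt_inst calls
# callback() once per tar member, after which future_files/ancient_files map
# the offending member names to their mtimes ("foo.deb" is hypothetical):
#
#   tar = TarTime(time.time() + 86400, time.mktime(time.strptime("1975", "%Y")))
#   apt_inst.debExtract(utils.open_file("foo.deb"), tar.callback, "control.tar.gz")
#   print tar.future_files, tar.ancient_files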
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """

    ###########################################################################
    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []
    def package_info(self):
        msg = ''

        if len(self.rejects) > 0:
            msg += "Reject Reasons:\n"
            msg += "\n".join(self.rejects)

        if len(self.warnings) > 0:
            msg += "Warnings:\n"
            msg += "\n".join(self.warnings)

        if len(self.notes) > 0:
            msg += "Notes:\n"
            msg += "\n".join(self.notes)

        return msg

    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], DictType):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
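    # The Subst map is consumed by utils.TemplateSubst(); a sketch with a
    # hypothetical template string:
    #
    #   self.update_subst()
    #   mail = utils.TemplateSubst(self.Subst, "To: __MAINTAINER_TO__\n...")
    #   # every __KEY__ token in the template is replaced by self.Subst["__KEY__"]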
    ###########################################################################
    def load_changes(self, filename):
        """
        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]

            self.pkg.changes[i] = {}

            for j in o.split():
                self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes.get("changed-by", ""), msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check there isn't already a changes file of the same name in one
        # of the queue directories.
        base_filename = os.path.basename(filename)
        for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
            if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
                self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
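    # Usage sketch (hypothetical path): a False return means the file could
    # not even be parsed; either way self.rejects carries the reasons.
    #
    #   u = Upload()
    #   if not u.load_changes("/path/to/foo_1.0-1_i386.changes"):
    #       print "\n".join(u.rejects)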
    ###########################################################################

    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
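    # Sketch of SuiteMappings entries this loop understands (hypothetical
    # values, dak.conf-style syntax):
    #
    #   SuiteMappings
    #   {
    #     "map stable proposed-updates";
    #     "silent-map testing-security testing-proposed-updates";
    #     "ignore unreleased";
    #     "reject frozen";
    #     "propup-version stable-security testing testing-proposed-updates";
    #   };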
    ###########################################################################

    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        # package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        # version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        # architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]
        archive = utils.where_am_i()

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], archive, session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        elif poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        archive = utils.where_am_i()
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        # XXX: As far as I can tell, this can no longer happen - see
        #      comments by AJ in old revisions - mhy
        # if reprocess is 2 we've already done this and we're checking
        # things again for the new .orig.tar.gz.
        # [Yes, I'm fully aware of how disgusting this is]
        if action and self.reprocess < 2:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

            os.chdir(cwd)

        # Check there isn't already a .changes or .dak file of the same name in
        # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)
        dot_dak_filename = base_filename[:-8] + ".dak"

        for suite in self.pkg.changes["distribution"].keys():
            copychanges = "Suite::%s::CopyChanges" % (suite)
            if cnf.has_key(copychanges) and \
               os.path.exists(os.path.join(cnf[copychanges], base_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (base_filename, cnf[copychanges]))

            copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
            if cnf.has_key(copy_dot_dak) and \
               os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (dot_dak_filename, cnf[copy_dot_dak]))

        has_binaries = False
        has_source = False

        session = DBConn().session()

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["byhand"] = 1
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found, but the Architecture line in the .changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################

    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return
                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue
                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # XXX: I'm fairly sure reprocess == 2 can never happen
        #      AJT disabled the is_incoming check years ago - mhy
        #      We should probably scrap or rethink the whole reprocess thing
        # Bail out if:
        #    a) there's no source
        # or b) reprocess is 2 - we will do this check next time when orig
        #       tarball is in 'files'
        # or c) the orig files are MIA
        if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
           or len(self.pkg.orig_files) == 0:
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################

    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
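    # utils.known_hashes is assumed to be a list of (name, function,
    # introduced-in-format) tuples, e.g. ("sha1", apt_pkg.sha1sum, (1, 8)):
    # for .changes files older than that format the hash is calculated here
    # instead of being required to be present in the file.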
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list of the symlinks that were created is returned (so they can be
        removed afterwards).
        """
        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
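    # Usage sketch: check_lintian() below relies on this so the tools it runs
    # can find the orig tarballs, and removes the links again afterwards:
    #
    #   symlinked = self.ensure_orig()
    #   # ... run tools that need the orig files ...
    #   for link in symlinked:
    #       os.unlink(link)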
    ###########################################################################

    def check_lintian(self):
        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        valid_dist = False
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                valid_dist = True
                break

        if not valid_dist:
            return

        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            # We don't have a tagfile, so just don't do anything.
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()

        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return
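        # The tag file is assumed to be shaped like this (hypothetical tags),
        # matching the lintiantags['warning'] / lintiantags['error'] lookups
        # further down:
        #
        #   lintian:
        #     warning:
        #       - some-overridable-tag
        #     error:
        #       - some-fatal-tag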
        # Try and find all orig files mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Now setup the input file for lintian. lintian wants "one tag per line" only,
        # so put it together like that. We put all types of tags in one file and then sort
        # through lintian's output later to see if it's a fatal tag we detected, or not.
        # So we only run lintian once on all tags, even if we might reject on some, but not
        # all of them.
        # Additionally build up a set of tags
        tags = set()
        (fd, temp_filename) = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tagtype in lintiantags:
            for tag in lintiantags[tagtype]:
                temptagfile.write("%s\n" % tag)
                tags.add(tag)
        temptagfile.close()

        # So now we should look at running lintian at the .changes file, capturing output
        # to then parse it.
        command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
        (result, output) = commands.getstatusoutput(command)

        # We are done with lintian, remove our tempfile and any symlinks we created
        os.unlink(temp_filename)
        for symlink in symlinked:
            os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))

        if len(output) == 0:
            return

        def log(*txt):
            if self.logger:
                self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))

        # We have lintian output; this package isn't clean. Let's parse it and
        # see whether we have grounds for a reject.
        # W: tzdata: binary-without-manpage usr/sbin/tzconfig
        for line in output.split('\n'):
            m = re_parse_lintian.match(line)
            if m is None:
                continue

            etype = m.group(1)
            epackage = m.group(2)
            etag = m.group(3)
            etext = m.group(4)

            # So lets check if we know the tag at all.
            if etag not in tags:
                continue

            if etype == 'O':
                # We know it and it is overridden. Check that the override is allowed.
                if etag in lintiantags['warning']:
                    # The tag is overridden, and it is allowed to be overridden.
                    # Don't add a reject message.
                    pass
                elif etag in lintiantags['error']:
                    # The tag is overridden - but is not allowed to be
                    self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
                    log("overridden tag is not allowed to be overridden", etag)
            else:
                # Tag is known, it is not overridden, direct reject.
                self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
                log("auto rejecting", etag)
                # Now tell if they *might* override it.
                if etag in lintiantags['warning']:
                    self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    ###########################################################################
    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existent) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we'd rather be safe than end up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, let's reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
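        # The transitions file is assumed to be shaped like this (hypothetical
        # values), matching the t["source"]/t["new"]/t["rm"]/t["reason"]/
        # t["packages"] lookups above:
        #
        #   libfoo-transition:
        #     source: libfoo
        #     new: 1.2-1
        #     rm: "Some Release Team Member"
        #     reason: "libfoo soname bump"
        #     packages:
        #       - libfoo
        #       - bar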
    ###########################################################################
    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        self.check_transition(session)

        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)

        # match claimed name with actual name:
        if uid is None:
            # This is fundamentally broken but need us to refactor how we get
            # the UIDs/Fingerprints in order for us to fix it properly
            uid, uid_email = self.pkg.changes["fingerprint"], uid
            may_nmu, may_sponsor = 1, 1
            # XXX by default new dds don't have a fingerprint/uid in the db atm,
            #     and can't get one in there if we don't allow nmu/sponsorship
        elif is_dm is False:
            # If is_dm is False, we allow full upload rights
            uid_email = "%s@debian.org" % (uid)
            may_nmu, may_sponsor = 1, 1
        else:
            # Assume limited upload rights unless we've discovered otherwise
            uid_email = uid
            may_nmu, may_sponsor = 0, 0

        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = 0
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = 0
            if uid_name == "": sponsored = 1
        else:
            sponsored = 1
            if ("source" in self.pkg.changes["architecture"] and
                uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        if sponsored and not may_sponsor:
            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))

        if not sponsored and not may_nmu:
            should_reject = True
            highest_sid, highest_version = None, None

            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
            #      It ignores higher versions with the dm_upload_allowed flag set to false
            #      I'm keeping the existing behaviour for now until I've gone back and
            #      checked exactly what the GR says - mhy
            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
                    highest_sid = si.source_id
                    highest_version = si.version

            if highest_sid is None:
                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
            else:
                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
                    if email == uid_email or name == uid_name:
                        should_reject = False
                        break

            if should_reject is True:
                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))

            for b in self.pkg.changes["binary"].keys():
                for suite in self.pkg.changes["distribution"].keys():
                    q = session.query(DBSource)
                    q = q.join(DBBinary).filter_by(package=b)
                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                    for s in q.all():
                        if s.source != self.pkg.changes["source"]:
                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s.source, suite))

            for f in self.pkg.files.keys():
                if self.pkg.files[f].has_key("byhand"):
                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
                if self.pkg.files[f].has_key("new"):
                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))

        session.close()
    ###########################################################################
    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
    ###########################################################################

    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: If set to false, no real action will be done.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.

        """
        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        if len(bugs) == 0:
            return summary

        bugs.sort()

        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                self.update_subst()
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        summary += "\n"

        return summary
    ###########################################################################

    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: if False, no real action will be taken

        @rtype: string
        @return: Textstring about action taken.
        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
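        # Example configuration consumed above (values are illustrative only):
        #
        #   Suite::unstable::Announce "debian-devel-changes@lists.debian.org";
        #   Dinstall::TrackingServer "packages.qa.debian.org";
        #   Dinstall::CloseBugs "true";
        #
        # A sourceful upload to unstable would then be announced to that list,
        # Bcc'ed to <source>@packages.qa.debian.org, and close_bugs() invoked.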
    ###########################################################################

    def accept(self, summary, short_summary, targetdir=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally
        hand the upload to the buildd queue (autobuild_upload).

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        cnf = Config()
        stats = SummaryStats()

        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')

        if targetdir is None:
            targetdir = cnf["Dir::Queue::Accepted"]

        if self.logger:
            self.logger.log(["Accepting changes", self.pkg.changes_file])

        self.pkg.write_dot_dak(targetdir)

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, targetdir)

        for name, entry in sorted(self.pkg.files.items()):
            utils.move(name, targetdir)
            stats.accept_bytes += float(entry["size"])

        stats.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                # [:-8] strips the ".changes" suffix from the filename
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)
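            # Each .debinfo line is "package version architecture
            # source-package source-version", e.g. (illustrative):
            #   dak 1.0-1 all dak 1.0-1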
        # It is Cnf["Dir::Queue::Accepted"] here, not targetdir!
        # <Ganneff> we do call queue_build too
        # <mhy> well yes, we'd have had to if we were inserting into accepted
        # <Ganneff> now. thats database only.
        # <mhy> urgh, that's going to get messy
        # <Ganneff> so i make the p-n call to it *also* using accepted/
        # <mhy> but then the packages will be in the queue_build table without the files being there
        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
        # <mhy> ah, good point
        # <Ganneff> so it will work out, as unchecked move it over
        # <mhy> that's all completely sick

        # This routine returns None on success or an error on failure
        res = get_or_set_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
        if res:
            utils.fubar(res)

    ###########################################################################

    def check_override(self):
        """
        Checks override entries for validity. Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
            - override disparity checks are disabled
            - mail sending is disabled
        """

        cnf = Config()

        # Abandon the check if:
        #  a) override disparity checks have been disabled
        #  b) we're not sending mail
        if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           cnf["Dinstall::Options::No-Mail"]:
            return

        summary = self.pkg.check_override()

        if summary == "":
            return

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]
    ###########################################################################

    def remove(self, dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked
        """
        if dir is None:
            os.chdir(self.pkg.directory)
        else:
            os.chdir(dir)

        for f in self.pkg.files.keys():
            os.unlink(f)
        os.unlink(self.pkg.changes_file)
    def move_to_dir(self, dest, perms=0660, changesperms=0664):
        """
        Move files to dest with certain perms/changesperms
        """
        utils.move(self.pkg.changes_file, dest, perms=changesperms)
        for f in self.pkg.files.keys():
            utils.move(f, dest, perms=perms)
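        # Typical call (illustrative; the exact queue key depends on the
        # caller's configuration):
        #   upload.move_to_dir(cnf["Dir::Queue::Done"])
        # moves the .changes in with mode 0664 and each payload file with 0660.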
    ###########################################################################

    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: list
        @param reject_files: names of the files to move to the reject directory
        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
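        # Note on the claim/morgue dance above: os.O_CREAT|os.O_EXCL makes the
        # open atomic, so two concurrent rejects cannot both claim dest_file;
        # the loser gets EEXIST, and any stale file is first moved aside to
        # the morgue before the claim is retried.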
    ###########################################################################

    def do_reject(self, manual=0, reject_message="", note=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @return: 0
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(note) > 0:
                for line in note:
                    temp_file.write(line)
            temp_file.close()
            editor = os.environ.get("EDITOR", "vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message, "  ", include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        # [:-8] strips the ".changes" suffix from the filename
        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
    ################################################################################

    def in_override_p(self, package, component, suite, binary_type, file, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: component name

        @type suite: string
        @param suite: suite name

        @type binary_type: string
        @param binary_type: type of the package

        @type file: string
        @param file: filename we check

        @return: the database result. But no one cares anyway.
        """

        cnf = Config()

        if binary_type == "":  # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = cnf["Suite::%s::OverrideSuite" % (suite)]

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[file]["override section"] = result.section.section
            self.pkg.files[file]["override priority"] = result.priority.priority

        return result
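    # Illustration for in_override_p (values invented): for package "dak",
    # suite "unstable", component "main" and binary_type "", the lookup is
    # done with file_type "dsc" first; if no source override exists yet, it
    # falls back to the ['deb', 'udeb'] override types.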
    ################################################################################

    def get_anyversion(self, sv_list, suite):
        """
        Return the highest version in C{suite} or in any suite it enhances.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check
        @type suite: string
        @param suite: suite name
        """
        cnf = Config()
        anyversion = None
        anysuite = [suite] + cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (s, v) in sv_list:
            if s in [x.lower() for x in anysuite]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v
        return anyversion
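    # Worked example (configuration and versions invented): with
    #   Suite::testing::VersionChecks::Enhances { "unstable"; };
    #   sv_list = [("testing", "1.0-1"), ("unstable", "1.2-1")]
    # get_anyversion(sv_list, "testing") returns "1.2-1" - the highest version
    # present in testing or in any suite testing is configured to enhance.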
    ################################################################################

    def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type file: string
        @param file: filename of the file whose version is being checked

        @type new_version: string
        @param new_version: version of the incoming upload
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite))]
            must_be_older_than = [i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite))]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0
                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
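    # Example configuration driving the checks above (values illustrative):
    #   Suite::testing::VersionChecks::MustBeNewerThan { "stable"; };
    #   Suite::proposed-updates::VersionChecks::MustBeOlderThan { "unstable"; };
    # A sourceful upload targeted at testing whose version is <= the version
    # already in stable is rejected; an upload targeted at proposed-updates
    # with a version >= the one in unstable is either propagated to the suite
    # named in changes["distribution-version"] or rejected, as decided above.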
    ################################################################################

    def check_binary_against_db(self, file, session):
        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))

        self.cross_suite_version_check([(x.suite.suite_name, x.binary.version) for x in q.all()],
                                       file, self.pkg.files[file]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
        q = q.filter_by(version=self.pkg.files[file]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
    ################################################################################

    def check_source_against_db(self, file, session):
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        q = session.query(SrcAssociation)
        q = q.join(DBSource).filter(DBSource.source==source)

        self.cross_suite_version_check([(x.suite.suite_name, x.source.version) for x in q.all()],
                                       file, version, sourceful=True)
    ################################################################################

    def check_dsc_against_db(self, file, session):
        """
        @warning: NB: this function can remove entries from the 'files' index [if
        the orig tarball is a duplicate of the one in the archive]; if
        you're iterating over 'files' and call this function as part of
        the loop, be sure to add a check to the top of the loop to
        ensure you haven't just tried to dereference the deleted entry.
        """

        Cnf = Config()
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            found = None
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # (filter into a new list; removing from ql while iterating
                # over it would skip entries)
                ql = [i for i in ql if i.filename.endswith(dsc_name)]

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dinstall.
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if len(ql) > 0:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if re_is_orig_source.match(dsc_name):
                        for i in ql:
                            if self.pkg.files.has_key(dsc_name) and \
                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                                # TODO: Don't delete the entry, just mark it as not needed
                                # This would fix the stupidity of changing something we often iterate over
                                # whilst we're doing it
                                del self.pkg.files[dsc_name]
                                if not orig_files.has_key(dsc_name):
                                    orig_files[dsc_name] = {}
                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                match = 1

                    if not match:
                        self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

            elif re_is_orig_source.match(dsc_name):
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                ql = [i for i in ql if i.filename.endswith(dsc_name)]

                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    # use the chosen match 'x', not the stale loop variable
                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id
                else:
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                            continue
                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
                continue

            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, file))
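        # Shape of the orig_files entries built above (fields depend on where
        # the file was found; values invented for illustration):
        #   orig_files["foo_1.0.orig.tar.gz"] = {
        #       "id": 42,          # pool file id (pool hits only)
        #       "path": "/srv/ftp/pool/main/f/foo/foo_1.0.orig.tar.gz",
        #       "location": 3,     # pool location id (pool hits only)
        #   }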
    ################################################################################

    def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database; since that's not
        # frozen between accept and our run time when called from p-a.

        # overwrite_checks is set to False when installing to stable/oldstable

        propagate = {}
        nopropagate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # propagate in case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype", ""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1

        for suite in propagate.keys():
            if suite in nopropagate:
                continue
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            # Check the package is still in the override tables
            # (re-fetch the entry; 'entry' would otherwise be stale from the loop above)
            entry = self.pkg.files[checkfile]
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype", ""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
    ################################################################################

    # This is not really a reject, but an unaccept, but since a) the code for
    # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
    # extremely rare, for now we'll go with whining at our admin folks...

    def do_unaccept(self):
        cnf = Config()

        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
        if cnf.has_key("Dinstall::Bcc"):
            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")

        reject_mail_message = utils.TemplateSubst(self.Subst, template)

        # Write the rejection email out as the <foo>.reason file
        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename)

        fd = os.open(reject_filename, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
        os.write(fd, reject_mail_message)
        os.close(fd)

        utils.send_mail(reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__REJECT_MESSAGE__"]
        del self.Subst["__CC__"]
    ################################################################################
    # If any file of an upload has a recent mtime then chances are good
    # the file is still being uploaded.

    def upload_too_new(self):
        cnf = Config()
        too_new = False
        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time() - os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except:
                pass
        os.chdir(cwd)
        return too_new