"""
Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
###############################################################################

# Standard library and apt imports used throughout this module (the elided
# import block is reconstructed here from the names the code below uses).
import errno
import os
import re
import stat
import sys
import time
import apt_inst
import apt_pkg
import utils
import commands
import shutil
import textwrap
from types import *

import yaml

from dak_exceptions import *
from changes import *
from regexes import *
from dbconn import *
from config import Config
from holding import Holding
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object

    @rtype: string
    @return: filetype
    """
    # Determine the type
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
################################################################################
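
# Illustrative sketch (not part of dak): driving get_type() from a Changes
# file entry.  The dict below is a hypothetical file entry; a configured dak
# database is assumed, since get_type() validates against the override types.
def _example_get_type():
    session = DBConn().session()
    entry = {"dbtype": "deb", "type": "deb"}   # hypothetical file entry
    file_type = get_type(entry, session)       # -> "deb"
    session.close()
    return file_type
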
# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}
    session = DBConn().session()

    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            continue
        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]

    # Drop packages for which an override already exists in a target suite
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
################################################################################
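
# Illustrative sketch (not part of dak): the shape of the dict that
# determine_new() returns, filled with hypothetical package data.  Each NEW
# source package maps to its section/priority/type/component plus the files
# that triggered the NEW check.
def _example_new_dict():
    return {
        "foo": {
            "priority": "optional",          # from the .changes files entry
            "section": "utils",
            "type": "deb",                   # as resolved by get_type()
            "component": "main",
            "files": ["foo_1.0-1_amd64.deb"],
        },
    }
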
def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        # (priority is a database object here, so compare the name)
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
###############################################################################
def lookup_uid_from_fingerprint(fpr, session):
    uid = None
    uid_name = ""
    # This is a stupid default, but see the comments below
    is_dm = False

    user = get_uid_from_fingerprint(fpr, session)

    if user is not None:
        uid = user.uid
        if user.name is None:
            uid_name = ''
        else:
            uid_name = user.name

        # Check the relevant fingerprint (which we have to have)
        for f in user.fingerprint:
            if f.fingerprint == fpr:
                is_dm = f.keyring.debian_maintainer
                break

    return (uid, uid_name, is_dm)
###############################################################################
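
# Illustrative sketch (not part of dak): what lookup_uid_from_fingerprint()
# hands back.  The fingerprint is hypothetical; a configured dak database is
# assumed.
def _example_lookup_uid():
    session = DBConn().session()
    (uid, uid_name, is_dm) = lookup_uid_from_fingerprint("0123456789ABCDEF", session)
    # uid is None for an unknown fingerprint, uid_name may be "", and is_dm
    # stays False unless the matching keyring marks a Debian Maintainer.
    session.close()
    return (uid, uid_name, is_dm)
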
# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
###############################################################################
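
# Illustrative sketch (not part of dak): feeding TarTime's callback through
# apt_inst.debExtract, mirroring what Upload.check_timestamps does below.
# The .deb path and the cutoffs are hypothetical.
def _example_tartime():
    future_cutoff = time.time() + 24 * 60 * 60            # one day of clock skew
    past_cutoff = time.mktime(time.strptime("1984", "%Y"))
    tar = TarTime(future_cutoff, past_cutoff)
    deb_file = utils.open_file("foo_1.0-1_amd64.deb")     # hypothetical path
    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
    deb_file.seek(0)
    apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
    deb_file.close()
    # Anything collected here would be rejected by check_timestamps.
    return (tar.future_files, tar.ancient_files)
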
class Upload(object):
    """
    Everything that has to do with an upload processed.

    """
    def __init__(self):
        self.logger = None
        self.pkg = Changes()
        self.reset()

    ###########################################################################

    def reset (self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []

        self.pkg.reset()
    def package_info(self):
        msg = ''

        if len(self.rejects) > 0:
            msg += "\n\nReject Reasons:\n%s" % "\n".join(self.rejects)

        if len(self.warnings) > 0:
            msg += "\n\nWarnings:\n%s" % "\n".join(self.warnings)

        if len(self.notes) > 0:
            msg += "\n\nNotes:\n%s" % "\n".join(self.notes)

        return msg
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], DictType):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
    ###########################################################################
    def load_changes(self, filename):
        """
        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
                 reject even if this is True (see what gets put in self.rejects).
                 This is simply to prevent us even trying things later which will
                 fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes file into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False

        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1

        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer (self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer (self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes.get("changed-by", ""), msg))

        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match (i) == None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check there isn't already a changes file of the same name in one
        # of the queue directories.
        base_filename = os.path.basename(filename)
        for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
            if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
                self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
    ###########################################################################
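
    # Illustrative sketch (not part of dak): load_changes() above splits
    # space-separated fields into membership dicts, which is why the rest of
    # this module tests e.g. self.pkg.changes["architecture"].has_key("source").
    def _example_multivalue_split(self):
        o = "source amd64"        # hypothetical Architecture field value
        d = {}
        for j in o.split():
            d[j] = 1
        return d                  # {"source": 1, "amd64": 1}
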
    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
    ###########################################################################
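
    # Illustrative sketch (not part of dak's shipped configuration): the kinds
    # of SuiteMappings entries check_distributions() above understands, in the
    # "mtype args..." form they are split into.  All suite names here are
    # hypothetical.
    def _example_suite_mappings(self):
        return [
            "map stable proposed-updates",          # rename target, with a note
            "silent-map testing-security testing",  # rename target, no note
            "map-unreleased testing unstable",      # remap unreleased arches
            "ignore oldstable",                     # drop this target suite
            "reject frozen",                        # refuse uploads outright
            "propup-version testing testing-proposed-updates",
        ]
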
    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()

        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) == None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))

        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])

        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)
        if m:
            # package name
            file_package = m.group(1)
            if entry["package"] != file_package:
                self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                    (f, file_package, entry["dbtype"], entry["package"]))
            epochless_version = re_no_epoch.sub('', control.Find("Version"))

            # version
            file_version = m.group(2)
            if epochless_version != file_version:
                self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                    (f, file_version, entry["dbtype"], epochless_version))

            # architecture
            file_architecture = m.group(3)
            if entry["architecture"] != file_architecture:
                self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                    (f, file_architecture, entry["dbtype"], entry["architecture"]))

        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]
        archive = utils.where_am_i()

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return

        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], archive, session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id

        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
        else:
            if poolfile is None:
                entry["files id"] = None
            else:
                entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        archive = utils.where_am_i()
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        # XXX: As far as I can tell, this can no longer happen - see
        #      comments by AJ in old revisions - mhy
        # if reprocess is 2 we've already done this and we're checking
        # things again for the new .orig.tar.gz.
        # [Yes, I'm fully aware of how disgusting this is]
        if action and self.reprocess < 2:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

            os.chdir(cwd)

        # Check there isn't already a .changes or .dak file of the same name in
        # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)
        dot_dak_filename = base_filename[:-8] + ".dak"

        for suite in self.pkg.changes["distribution"].keys():
            copychanges = "Suite::%s::CopyChanges" % (suite)
            if cnf.has_key(copychanges) and \
               os.path.exists(os.path.join(cnf[copychanges], base_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (base_filename, cnf[copychanges]))

            copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
            if cnf.has_key(copy_dot_dak) and \
               os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (dot_dak_filename, cnf[copy_dot_dak]))

        self.reprocess = 0
        has_binaries = False
        has_source = False

        session = DBConn().session()

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found and Architecture line in changes mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False

        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if (result != 0):
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # XXX: I'm fairly sure reprocess == 2 can never happen
        #      AJT disabled the is_incoming check years ago - mhy
        #      We should probably scrap or rethink the whole reprocess thing
        # Bail out if:
        #    a) there's no source
        # or b) reprocess is 2 - we will do this check next time when orig
        #       tarball is in 'files'
        # or c) the orig files are MIA
        if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
           or len(self.pkg.orig_files) == 0:
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################
    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0

        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict.  This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)

    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
    ###########################################################################
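
    # Illustrative sketch (not part of dak): how ensure_hashes() above turns
    # the Format field into a comparable (major, minor) tuple.  The value is
    # hypothetical.
    def _example_format_tuple(self):
        format = "1.8".split(".", 1)               # -> ["1", "8"]
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0
        return format                              # -> (1, 8)
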
    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir.  If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed later).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)

                os.symlink(path, dest)
                symlinked.append(dest)

                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
                'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
    ###########################################################################

    def check_lintian(self):
        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        valid_dist = False
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                valid_dist = True
                break

        if not valid_dist:
            return

        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            # We don't have a tagfile, so just don't do anything.
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()
        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Now setup the input file for lintian. lintian wants "one tag per line" only,
        # so put it together that way. We put all types of tags in one file and then sort
        # through lintian's output later to see if it's a fatal tag we detected, or not.
        # So we only run lintian once on all tags, even if we might reject on some, but not
        # on others.
        # Additionally build up a set of tags
        tags = set()
        (fd, temp_filename) = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tagtype in lintiantags:
            for tag in lintiantags[tagtype]:
                temptagfile.write("%s\n" % tag)
                tags.add(tag)
        temptagfile.close()

        # So now we should look at running lintian at the .changes file, capturing output
        # to then parse it.
        command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
        (result, output) = commands.getstatusoutput(command)

        # We are done with lintian, remove our tempfile and any symlinks we created
        os.unlink(temp_filename)
        for symlink in symlinked:
            os.unlink(symlink)

        if (result == 2):
            utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))

        if len(output) == 0:
            return

        def log(*txt):
            if self.logger:
                self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))

        # We have output of lintian, this package isn't clean. Lets parse it and see if we
        # are having a victim for a reject.
        # W: tzdata: binary-without-manpage usr/sbin/tzconfig
        for line in output.split('\n'):
            m = re_parse_lintian.match(line)
            if m is None:
                continue

            etype = m.group(1)
            epackage = m.group(2)
            etag = m.group(3)
            etext = m.group(4)

            # So lets check if we know the tag at all.
            if etag not in tags:
                continue

            if etype == 'O':
                # We know it and it is overridden. Check that override is allowed.
                if etag in lintiantags['warning']:
                    # The tag is overridden, and it is allowed to be overridden.
                    # Don't add a reject message.
                    pass
                elif etag in lintiantags['error']:
                    # The tag is overridden - but is not allowed to be
                    self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
                    log("ftpmaster does not allow tag to be overridable", etag)
            else:
                # Tag is known, it is not overridden, direct reject.
                self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
                # Now tell if they *might* override it.
                if etag in lintiantags['warning']:
                    log("auto rejecting", "overridable", etag)
                    self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
                else:
                    log("auto rejecting", "not overridable", etag)
    ###########################################################################
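
    # Illustrative sketch (not part of dak's shipped configuration): the parsed
    # shape of the Dinstall::LintianTags YAML file check_lintian() above works
    # from.  The tag names are hypothetical; only the 'warning'/'error' keys
    # matter (overridable vs. never overridable).
    def _example_lintian_tags(self):
        return {
            'lintian': {
                'warning': ['some-overridable-tag'],
                'error': ['some-fatal-tag'],
            },
        }
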
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    ###########################################################################

    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a transitions file defined (and existing)
        # to check against.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe rather than end up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
    ###########################################################################
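
    # Illustrative sketch (not part of dak): a transitions file entry as
    # check_transition() above sees it after yaml.load().  All names and
    # versions are hypothetical.
    def _example_transitions(self):
        return {
            'some-transition': {
                'source': 'libfoo',           # source package driving the transition
                'new': '2.0-1',               # version we wait for in testing
                'rm': 'Some Releaser',        # responsible Release-Team member
                'reason': 'libfoo soname bump',
                'packages': ['bar', 'baz'],   # sources blocked while it runs
            },
        }
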
    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        self.check_transition(session)

        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)

        # match claimed name with actual name:
        if uid is None:
            # This is fundamentally broken but need us to refactor how we get
            # the UIDs/Fingerprints in order for us to fix it properly
            uid, uid_email = self.pkg.changes["fingerprint"], uid
            may_nmu, may_sponsor = 1, 1
            # XXX by default new dds don't have a fingerprint/uid in the db atm,
            #     and can't get one in there if we don't allow nmu/sponsorship
        elif is_dm is False:
            # If is_dm is False, we allow full upload rights
            uid_email = "%s@debian.org" % (uid)
            may_nmu, may_sponsor = 1, 1
        else:
            # Assume limited upload rights unless we've discovered otherwise
            uid_email = uid
            may_nmu, may_sponsor = 0, 0

        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = 0
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = 0
            if uid_name == "": sponsored = 1
        else:
            sponsored = 1
            if ("source" in self.pkg.changes["architecture"] and
                uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        if sponsored and not may_sponsor:
            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))

        if not sponsored and not may_nmu:
            should_reject = True
            highest_sid, highest_version = None, None

            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
            #      It ignores higher versions with the dm_upload_allowed flag set to false
            #      I'm keeping the existing behaviour for now until I've gone back and
            #      checked exactly what the GR says - mhy
            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
                    highest_sid = si.source_id
                    highest_version = si.version

            if highest_sid is None:
                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
            else:
                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
                    if email == uid_email or name == uid_name:
                        should_reject = False
                        break

            if should_reject is True:
                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))

            for b in self.pkg.changes["binary"].keys():
                for suite in self.pkg.changes["distribution"].keys():
                    q = session.query(DBSource)
                    q = q.join(DBBinary).filter_by(package=b)
                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                    for s in q.all():
                        if s.source != self.pkg.changes["source"]:
                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))

            for f in self.pkg.files.keys():
                if self.pkg.files[f].has_key("byhand"):
                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
                if self.pkg.files[f].has_key("new"):
                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))

        session.close()
    ###########################################################################
    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
    ###########################################################################

    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: Set to false, no real action will be done.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.

        """

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""

                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        summary += "\n"

        return summary
    ###########################################################################

    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: Set to false, no real action will be done.

        @rtype: string
        @return: Text string about action taken.

        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
    ###########################################################################
    def accept(self, summary, short_summary, targetdir=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally
        update the autobuild queue.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """
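        # Order of operations below, for orientation:
        #   1. log, write the .dak file, and move the .changes plus its files
        #      into targetdir (default: Dir::Queue::Accepted)
        #   2. unless No-Mail is set: send the accepted mail, announce, and
        #      (via announce/close_bugs) close bugs
        #   3. optionally dump BTS version-tracking info
        #   4. register the upload in the 'accepted' autobuild queue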
        cnf = Config()
        stats = SummaryStats()

        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')

        if targetdir is None:
            targetdir = cnf["Dir::Queue::Accepted"]

        if self.logger:
            self.logger.log(["Accepting changes", self.pkg.changes_file])

        self.pkg.write_dot_dak(targetdir)

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, targetdir)

        for name, entry in sorted(self.pkg.files.items()):
            utils.move(name, targetdir)
            stats.accept_bytes += float(entry["size"])

        stats.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)
        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)
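            # The .debinfo file ends up with one line per binary, e.g.
            # (made-up values):
            #
            #     hello 2.4-1 amd64 hello 2.4-1
            #
            # i.e. package, version, architecture, source package and source
            # version, in that order.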
        # It's cnf["Dir::Queue::Accepted"] here, not targetdir!
        # <Ganneff> we do call queue_build too
        # <mhy> well yes, we'd have had to if we were inserting into accepted
        # <Ganneff> now. thats database only.
        # <mhy> urgh, that's going to get messy
        # <Ganneff> so i make the p-n call to it *also* using accepted/
        # <mhy> but then the packages will be in the queue_build table without the files being there
        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
        # <mhy> ah, good point
        # <Ganneff> so it will work out, as unchecked move it over
        # <mhy> that's all completely sick

        # This routine returns None on success or an error on failure
        res = get_or_set_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
        if res:
            utils.fubar(res)
    def check_override(self):
        """
        Checks override entries for validity. Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
          - override disparity checks are disabled
          - mail sending is disabled
        """
        cnf = Config()

        # Abandon the check if:
        #  a) override disparity checks have been disabled
        #  b) we're not sending mail
        if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           cnf["Dinstall::Options::No-Mail"]:
            return

        summary = self.pkg.check_override()

        if summary == "":
            return

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]
    ###########################################################################
    def remove(self, dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked
        """
        os.chdir(dir or self.pkg.directory)

        for f in self.pkg.files.keys():
            os.unlink(f)
        os.unlink(self.pkg.changes_file)
    ###########################################################################
    def move_to_dir(self, dest, perms=0660, changesperms=0664):
        """
        Move files to dest with certain perms/changesperms
        """
        utils.move(self.pkg.changes_file, dest, perms=changesperms)
        for f in self.pkg.files.keys():
            utils.move(f, dest, perms=perms)
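        # Example call (hypothetical queue directory):
        #
        #     u.move_to_dir(cnf["Dir::Queue::Done"])
        #
        # which moves the .changes with mode 0664 and every other file of the
        # upload with mode 0660, unless overridden.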
    ###########################################################################
    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory. If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: list
        @param reject_files: names of the files to move
        """
        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists? Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
                    except OSError:
                        # Likewise, if we can't claim the slot now, give up.
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
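            # Note: the os.open() with O_CREAT|O_EXCL above is what makes this
            # safe against two processes rejecting the same filename at once;
            # only one open() can succeed, and the loser lands in the EEXIST
            # path, which moves the previous reject file to the morgue before
            # claiming the slot.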
    ###########################################################################
    def do_reject(self, manual=0, reject_message="", note=""):
        """
        Reject an upload. If called without a reject message and C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @type note: string
        @param note: optional text used to prefill the editor
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(note) > 0:
                for line in note:
                    temp_file.write(line)
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"
        cnf = Config()

        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
    ################################################################################
    def in_override_p(self, package, component, suite, binary_type, file, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: component name

        @type suite: string
        @param suite: suite name

        @type binary_type: string
        @param binary_type: type of the package

        @type file: string
        @param file: filename we check

        @return: the database result. But no one cares anyway.
        """
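        # e.g. (hypothetical values):
        #
        #     in_override_p("hello", "main", "unstable", "deb",
        #                   "hello_2.4-1_amd64.deb", session)
        #
        # returns a non-empty result if hello already has an override entry
        # in unstable, and an empty one if the package is NEW there.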
        cnf = Config()

        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = cnf["Suite::%s::OverrideSuite" % (suite)]

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[file]["override section"] = result.section.section
            self.pkg.files[file]["override priority"] = result.priority.priority

        return result
    ################################################################################
    def get_anyversion(self, sv_list, suite):
        """
        Return the highest version of a package seen in C{suite} or in any of
        the suites it is configured to enhance
        (Suite::<suite>::VersionChecks::Enhances).

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type suite: string
        @param suite: suite name

        @return: highest version found, or None
        """
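        # Worked example (made-up data): with
        #     sv_list = [("unstable", "1.0-1"), ("experimental", "1.1-1")]
        # and experimental configured to enhance unstable,
        # get_anyversion(sv_list, "experimental") considers both entries and
        # returns "1.1-1", while get_anyversion(sv_list, "unstable") only
        # sees the unstable entry and returns "1.0-1".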
        cnf = Config()
        anyversion = None
        anysuite = [suite] + cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v

        return anyversion
    ################################################################################
    def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
        """
        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.

        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type file: string
        @param file: filename of the upload being checked (used in reject messages)

        @type new_version: string
        @param new_version: version of the upload being checked
        """
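        # The rules enforced here look like this in dak.conf (illustrative
        # snippet, not from any real configuration):
        #
        #     Suite::testing::VersionChecks
        #     {
        #       MustBeNewerThan { stable; };
        #       MustBeOlderThan { experimental; };
        #     };
        #
        # i.e. an upload targeted at testing must be strictly newer than the
        # version in stable (and in testing itself, which is always enforced)
        # and strictly older than the version in experimental, unless one of
        # the propagation escapes below applies.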
        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
    ################################################################################
    def check_binary_against_db(self, file, session):
        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))

        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       file, self.pkg.files[file]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
        q = q.filter_by(version=self.pkg.files[file]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))
    ################################################################################
    def check_source_against_db(self, file, session):
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        q = session.query(SrcAssociation)
        q = q.join(DBSource).filter(DBSource.source==source)

        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                       file, version, sourceful=True)
    ################################################################################
    def check_dsc_against_db(self, file, session):
        """
        @warning: NB: this function can remove entries from the 'files' index [if
        the orig tarball is a duplicate of the one in the archive]; if
        you're iterating over 'files' and call this function as part of
        the loop, be sure to add a check to the top of the loop to
        ensure you haven't just tried to dereference the deleted entry.
        """
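        # Caller-side pattern the warning above asks for (sketch; see
        # accepted_checks() below for the real thing):
        #
        #     for f in self.pkg.files.keys():
        #         if not self.pkg.files.has_key(f):
        #             continue   # entry was deleted by check_dsc_against_db()
        #         ...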
        cnf = Config()
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files

        # Try and find all files mentioned in the .dsc. This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            found = None
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)
                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]

                # "[dak] has not broken them. [dak] has fixed a
                # brokenness. Your crappy hack exploited a bug in
                # the old dak.
                #
                # "(Come on! I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if len(ql) > 0:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if re_is_orig_source.match(dsc_name):
                        for i in ql:
                            if self.pkg.files.has_key(dsc_name) and \
                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                                # TODO: Don't delete the entry, just mark it as not needed
                                # This would fix the stupidity of changing something we often iterate over
                                # whilst we're doing it
                                del self.pkg.files[dsc_name]
                                if not orig_files.has_key(dsc_name):
                                    orig_files[dsc_name] = {}
                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                match = 1

                    if not match:
                        self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
            elif re_is_orig_source.match(dsc_name):
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                ql = [ i for i in ql if i.filename.endswith(dsc_name) ]

                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody. So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id
                else:
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        if not cnf.has_key("Dir::Queue::%s" % (directory)):
                            continue
                        in_otherdir = os.path.join(cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
                continue
            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, file))
    ################################################################################
    def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database; since that's not
        # frozen between accept and our run time when called from p-a.

        # overwrite_checks is set to False when installing to stable/oldstable

        propagate = {}
        nopropagate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f
        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # propagate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1

        for suite in propagate.keys():
            if suite in nopropagate:
                continue
            self.pkg.changes["distribution"][suite] = 1
        for checkfile in self.pkg.files.keys():
            # Check the package is still in the override tables
            entry = self.pkg.files[checkfile]
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
    ################################################################################
    # This is not really a reject, but an unaccept, but since a) the code for
    # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
    # extremely rare, for now we'll go with whining at our admin folks...

    def do_unaccept(self):
        cnf = Config()

        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
        if cnf.has_key("Dinstall::Bcc"):
            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")

        reject_mail_message = utils.TemplateSubst(self.Subst, template)

        # Write the rejection email out as the <foo>.reason file
        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename)

        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        os.write(fd, reject_mail_message)
        os.close(fd)

        utils.send_mail(reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__REJECT_MESSAGE__"]
        del self.Subst["__CC__"]
    ################################################################################
    # If any file of an upload has a recent mtime then chances are good
    # the file is still being uploaded.

    def upload_too_new(self):
        cnf = Config()
        too_new = False

        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)

        for f in file_list:
            try:
                last_modified = time.time()-os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):
                    too_new = True
                    break
            except OSError:
                # The file may have been moved or removed under us
                pass

        os.chdir(cwd)
        return too_new