Queue utility functions for dak

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2001 - 2006 James Troup <james@nocrew.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
@license: GNU General Public License version 2 or later

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

###############################################################################
# Stdlib and apt imports needed by the code below
import commands, errno, os, re, shutil, stat, sys, textwrap, time
from types import DictType
import apt_inst, apt_pkg, yaml

import utils
from dak_exceptions import *
from regexes import *
from dbconn import *
from config import Config
from holding import Holding
from summarystats import SummaryStats
from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary

###############################################################################
def get_type(f, session):
    """
    Get the file type of C{f}

    @type f: dict
    @param f: file entry from Changes object

    @type session: SQLA Session
    @param session: SQL Alchemy session object
    """
    if f.has_key("dbtype"):
        file_type = f["dbtype"]
    elif re_source_ext.match(f["type"]):
        file_type = "dsc"
    else:
        utils.fubar("invalid type (%s) for new.  Dazed, confused and sure as heck not continuing." % (f["type"]))

    # Validate the override type
    type_id = get_override_type(file_type, session)
    if type_id is None:
        utils.fubar("invalid type (%s) for new.  Say wha?" % (file_type))

    return file_type
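# A minimal usage sketch for get_type() (hypothetical file entry; real
# entries come from a parsed .changes file):
#
#   session = DBConn().session()
#   f = {"type": "dsc"}            # no "dbtype" key, so re_source_ext decides
#   print get_type(f, session)     # -> "dsc" (assuming the override type exists)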
################################################################################

# Determine what parts in a .changes are NEW

def determine_new(changes, files, warn=1):
    """
    Determine what parts in a C{changes} file are NEW.

    @type changes: Upload.Pkg.changes dict
    @param changes: Changes dictionary

    @type files: Upload.Pkg.files dict
    @param files: Files dictionary

    @type warn: bool
    @param warn: Warn if overrides are added for (old)stable

    @rtype: dict
    @return: dictionary of NEW components.

    """
    new = {}

    session = DBConn().session()
    # Build up a list of potentially new things
    for name, f in files.items():
        # Skip byhand elements
        if f["type"] == "byhand":
            continue

        pkg = f["package"]
        priority = f["priority"]
        section = f["section"]
        file_type = get_type(f, session)
        component = f["component"]

        if file_type == "dsc":
            priority = "source"

        if not new.has_key(pkg):
            new[pkg] = {}
            new[pkg]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["type"] = file_type
            new[pkg]["component"] = component
            new[pkg]["files"] = []
        else:
            old_type = new[pkg]["type"]
            if old_type != file_type:
                # source gets trumped by deb or udeb
                if old_type == "dsc":
                    new[pkg]["priority"] = priority
                    new[pkg]["section"] = section
                    new[pkg]["type"] = file_type
                    new[pkg]["component"] = component

        new[pkg]["files"].append(name)

        if f.has_key("othercomponents"):
            new[pkg]["othercomponents"] = f["othercomponents"]
    for suite in changes["suite"].keys():
        for pkg in new.keys():
            ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
            if len(ql) > 0:
                for file_entry in new[pkg]["files"]:
                    if files[file_entry].has_key("new"):
                        del files[file_entry]["new"]
                del new[pkg]

    if warn:
        for s in ['stable', 'oldstable']:
            if changes["suite"].has_key(s):
                print "WARNING: overrides will be added for %s!" % s
        for pkg in new.keys():
            if new[pkg].has_key("othercomponents"):
                print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])

    session.close()

    return new
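# A minimal usage sketch (hypothetical objects; changes/files normally come
# from an Upload after load_changes() and check_files() have run):
#
#   new = determine_new(u.pkg.changes, u.pkg.files, warn=0)
#   for pkg in new.keys():
#       print "%s: %s/%s (%s)" % (pkg, new[pkg]["component"],
#                                 new[pkg]["section"], new[pkg]["priority"])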
################################################################################

def check_valid(new):
    """
    Check if section and priority for NEW packages exist in database.
    Additionally does sanity checks:
      - debian-installer packages have to be udeb (or source)
      - non debian-installer packages can not be udeb
      - source priority can only be assigned to dsc file types

    @type new: dict
    @param new: Dict of new packages with their section, priority and type.

    """
    for pkg in new.keys():
        section_name = new[pkg]["section"]
        priority_name = new[pkg]["priority"]
        file_type = new[pkg]["type"]

        section = get_section(section_name)
        if section is None:
            new[pkg]["section id"] = -1
        else:
            new[pkg]["section id"] = section.section_id

        priority = get_priority(priority_name)
        if priority is None:
            new[pkg]["priority id"] = -1
        else:
            new[pkg]["priority id"] = priority.priority_id

        # Sanity checks
        di = section_name.find("debian-installer") != -1

        # If d-i, we must be udeb and vice-versa
        if (di and file_type not in ("udeb", "dsc")) or \
           (not di and file_type == "udeb"):
            new[pkg]["section id"] = -1

        # If dsc we need to be source and vice-versa
        if (priority_name == "source" and file_type != "dsc") or \
           (priority_name != "source" and file_type == "dsc"):
            new[pkg]["priority id"] = -1
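# A minimal usage sketch (hypothetical entry; `new` normally comes from
# determine_new() above):
#
#   new = {"foo-udeb": {"section": "debian-installer", "priority": "optional",
#                       "type": "udeb"}}
#   check_valid(new)
#   # new["foo-udeb"]["section id"] / ["priority id"] are now set (-1 if invalid)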
###############################################################################

def lookup_uid_from_fingerprint(fpr, session):
    uid = None
    uid_name = ""
    # This is a stupid default, but see the comments below
    is_dm = False

    user = get_uid_from_fingerprint(fpr, session)

    if user is not None:
        uid = user.uid
        if user.name is None:
            uid_name = ''
        else:
            uid_name = user.name

        # Check the relevant fingerprint (which we have to have)
        for f in user.fingerprint:
            if f.fingerprint == fpr:
                is_dm = f.keyring.debian_maintainer
                break

    return (uid, uid_name, is_dm)
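# Example of the returned triple (hypothetical fingerprint and uid):
#
#   (uid, uid_name, is_dm) = lookup_uid_from_fingerprint("0123...CDEF", session)
#   # e.g. ("jdoe", "John Doe", False) for a known non-DM key,
#   # or (None, "", False) if the fingerprint is not in the database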
###############################################################################

# Used by Upload.check_timestamps
class TarTime(object):
    def __init__(self, future_cutoff, past_cutoff):
        self.reset()
        self.future_cutoff = future_cutoff
        self.past_cutoff = past_cutoff

    def reset(self):
        self.future_files = {}
        self.ancient_files = {}

    def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
        if MTime > self.future_cutoff:
            self.future_files[Name] = MTime
        if MTime < self.past_cutoff:
            self.ancient_files[Name] = MTime
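# A minimal usage sketch, mirroring Upload.check_timestamps() below
# (hypothetical .deb path):
#
#   tar = TarTime(time.time() + 86400, time.mktime(time.strptime("1984", "%Y")))
#   deb_file = utils.open_file("pool/main/f/foo/foo_1.0-1_amd64.deb")
#   apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#   print tar.future_files, tar.ancient_files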
###############################################################################

class Upload(object):
    """
    Everything that has to do with processing an upload.

    """

    ###########################################################################
    def reset(self):
        """ Reset a number of internal variables."""

        # Initialize the substitution template map
        cnf = Config()
        self.Subst = {}
        self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]

        self.rejects = []
        self.warnings = []
        self.notes = []
    ###########################################################################
    def package_info(self):
        """
        Format various messages from this Upload to send to the maintainer.
        """

        msg = ''

        reasons = [
            ('Reject Reasons', self.rejects),
            ('Warnings', self.warnings),
            ('Notes', self.notes),
        ]

        for title, messages in reasons:
            if messages:
                msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))

        return msg
    ###########################################################################
    def update_subst(self):
        """ Set up the per-package template substitution mappings """

        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], DictType):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        if "sponsoremail" in self.pkg.changes:
            self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]

        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
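    # A sketch of how the substitution map is consumed (see accept() and
    # close_bugs() below): the per-package values are rendered into a mail
    # template and sent.
    #
    #   template = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')
    #   mail_message = utils.TemplateSubst(self.Subst, template)
    #   utils.send_mail(mail_message)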
    ###########################################################################
    def load_changes(self, filename):
        """
        @rtype: boolean
        @return: whether the changes file was valid or not.  We may want to
        reject even if this is True (see what gets put in self.rejects).
        This is simply to prevent us even trying things later which will
        fail because we couldn't properly parse the file.
        """
        Cnf = Config()
        self.pkg.changes_file = filename

        # Parse the .changes field into a dictionary
        try:
            self.pkg.changes.update(parse_changes(filename))
        except CantOpenError:
            self.rejects.append("%s: can't read file." % (filename))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except ChangesUnicodeError:
            self.rejects.append("%s: changes file not proper utf-8" % (filename))
            return False

        # Parse the Files field from the .changes into another dictionary
        try:
            self.pkg.files.update(utils.build_file_list(self.pkg.changes))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (filename, format))
            return False
        # Check for mandatory fields
        for i in ("distribution", "source", "binary", "architecture",
                  "version", "maintainer", "files", "changes", "description"):
            if not self.pkg.changes.has_key(i):
                # Avoid undefined errors later
                self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
                return False

        # Strip a source version in brackets from the source field
        if re_strip_srcver.search(self.pkg.changes["source"]):
            self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])

        # Ensure the source field is a valid package name.
        if not re_valid_pkg_name.match(self.pkg.changes["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))

        # Split multi-value fields into a lower-level dictionary
        for i in ("architecture", "distribution", "binary", "closes"):
            o = self.pkg.changes.get(i, "")
            if o != "":
                del self.pkg.changes[i]
                self.pkg.changes[i] = {}
                for j in o.split():
                    self.pkg.changes[i][j] = 1
        # Fix the Maintainer: field to be RFC822/2047 compatible
        try:
            (self.pkg.changes["maintainer822"],
             self.pkg.changes["maintainer2047"],
             self.pkg.changes["maintainername"],
             self.pkg.changes["maintaineremail"]) = \
                   fix_maintainer(self.pkg.changes["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes["maintainer"], msg))

        # ...likewise for the Changed-By: field if it exists.
        try:
            (self.pkg.changes["changedby822"],
             self.pkg.changes["changedby2047"],
             self.pkg.changes["changedbyname"],
             self.pkg.changes["changedbyemail"]) = \
                   fix_maintainer(self.pkg.changes.get("changed-by", ""))
        except ParseMaintError, msg:
            self.pkg.changes["changedby822"] = ""
            self.pkg.changes["changedby2047"] = ""
            self.pkg.changes["changedbyname"] = ""
            self.pkg.changes["changedbyemail"] = ""

            self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
                   % (filename, self.pkg.changes.get("changed-by", ""), msg))
        # Ensure all the values in Closes: are numbers
        if self.pkg.changes.has_key("closes"):
            for i in self.pkg.changes["closes"].keys():
                if re_isanum.match(i) is None:
                    self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))

        # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
        self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
        self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])

        # Check there isn't already a changes file of the same name in one
        # of the queue directories.
        base_filename = os.path.basename(filename)
        for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
            if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
                self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))

        # Check the .changes is non-empty
        if not self.pkg.files:
            self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
            return False

        # Changes was syntactically valid even if we'll reject
        return True
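    # A minimal usage sketch (hypothetical path; load_changes() only parses,
    # later checks append their findings to self.rejects):
    #
    #   u = Upload()
    #   if u.load_changes("/path/to/foo_1.0-1_amd64.changes"):
    #       u.check_distributions()
    #       u.check_files(action=False)
    #   print u.package_info()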
    ###########################################################################
    def check_distributions(self):
        "Check and map the Distribution field"

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.ValueList("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source] = dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not Cnf.has_key("Suite::%s" % (suite)):
                self.rejects.append("Unknown distribution `%s'." % (suite))
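    # A hypothetical SuiteMappings configuration illustrating the directives
    # handled above (dak.conf syntax; entries are examples only):
    #
    #   SuiteMappings
    #   {
    #     "map stable proposed-updates";
    #     "silent-map testing-security testing-proposed-updates";
    #     "ignore testing";
    #     "reject frozen";
    #     "propup-version stable-security testing";
    #   };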
    ###########################################################################
    def binary_file_checks(self, f, session):
        cnf = Config()
        entry = self.pkg.files[f]

        # Extract package control information
        deb_file = utils.open_file(f)
        try:
            control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
        except:
            self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
            deb_file.close()
            # Can't continue, none of the checks on control would work.
            return

        # Check for mandatory "Description:"
        deb_file.seek(0)
        try:
            apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
        except:
            self.rejects.append("%s: Missing Description in binary package" % (f))
            return

        deb_file.close()
        # Check for mandatory fields
        for field in [ "Package", "Architecture", "Version" ]:
            if control.Find(field) is None:
                # Can't continue
                self.rejects.append("%s: No %s field in control." % (f, field))
                return

        # Ensure the package name matches the one given in the .changes
        if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
            self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))

        # Validate the package field
        package = control.Find("Package")
        if not re_valid_pkg_name.match(package):
            self.rejects.append("%s: invalid package name '%s'." % (f, package))

        # Validate the version field
        version = control.Find("Version")
        if not re_valid_version.match(version):
            self.rejects.append("%s: invalid version number '%s'." % (f, version))

        # Ensure the architecture of the .deb is one we know about.
        default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
        architecture = control.Find("Architecture")
        upload_suite = self.pkg.changes["distribution"].keys()[0]

        if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
           and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
            self.rejects.append("Unknown architecture '%s'." % (architecture))

        # Ensure the architecture of the .deb is one of the ones
        # listed in the .changes.
        if not self.pkg.changes["architecture"].has_key(architecture):
            self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))

        # Sanity-check the Depends field
        depends = control.Find("Depends")
        if depends == '':
            self.rejects.append("%s: Depends field is empty." % (f))

        # Sanity-check the Provides field
        provides = control.Find("Provides")
        if provides is not None:
            provide = re_spacestrip.sub('', provides)
            if provide == '':
                self.rejects.append("%s: Provides field is empty." % (f))
            prov_list = provide.split(",")
            for prov in prov_list:
                if not re_valid_pkg_name.match(prov):
                    self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
        # Check the section & priority match those given in the .changes (non-fatal)
        if control.Find("Section") and entry["section"] != "" \
           and entry["section"] != control.Find("Section"):
            self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Section", ""), entry["section"]))
        if control.Find("Priority") and entry["priority"] != "" \
           and entry["priority"] != control.Find("Priority"):
            self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
                                 (f, control.Find("Priority", ""), entry["priority"]))

        entry["package"] = package
        entry["architecture"] = architecture
        entry["version"] = version
        entry["maintainer"] = control.Find("Maintainer", "")

        if f.endswith(".udeb"):
            self.pkg.files[f]["dbtype"] = "udeb"
        elif f.endswith(".deb"):
            self.pkg.files[f]["dbtype"] = "deb"
        else:
            self.rejects.append("%s is neither a .deb nor a .udeb." % (f))

        entry["source"] = control.Find("Source", entry["package"])
        # Get the source version
        source = entry["source"]
        source_version = ""

        if source.find("(") != -1:
            m = re_extract_src_version.match(source)
            source = m.group(1)
            source_version = m.group(2)

        if not source_version:
            source_version = self.pkg.files[f]["version"]

        entry["source package"] = source
        entry["source version"] = source_version

        # Ensure the filename matches the contents of the .deb
        m = re_isadeb.match(f)

        #  package name
        file_package = m.group(1)
        if entry["package"] != file_package:
            self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
                                (f, file_package, entry["dbtype"], entry["package"]))
        epochless_version = re_no_epoch.sub('', control.Find("Version"))

        #  version
        file_version = m.group(2)
        if epochless_version != file_version:
            self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
                                (f, file_version, entry["dbtype"], epochless_version))

        #  architecture
        file_architecture = m.group(3)
        if entry["architecture"] != file_architecture:
            self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
                                (f, file_architecture, entry["dbtype"], entry["architecture"]))
        # Check for existent source
        source_version = entry["source version"]
        source_package = entry["source package"]
        if self.pkg.changes["architecture"].has_key("source"):
            if source_version != self.pkg.changes["version"]:
                self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
                                    (source_version, f, self.pkg.changes["version"]))
        else:
            # Check in the SQL database
            if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
                # Check in one of the other directories
                source_epochless_version = re_no_epoch.sub('', source_version)
                dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
                if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
                    entry["byhand"] = 1
                elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
                    entry["new"] = 1
                else:
                    dsc_file_exists = False
                    for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
                        if cnf.has_key("Dir::Queue::%s" % (myq)):
                            if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
                                dsc_file_exists = True
                                break

                    if not dsc_file_exists:
                        self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))

        # Check the version and for file overwrites
        self.check_binary_against_db(f, session)

        # Temporarily disable contents generation until we change the table storage layout
        #b = Binary(f)
        #b.scan_package()
        #if len(b.rejects) > 0:
        #    for j in b.rejects:
        #        self.rejects.append(j)
    def source_file_checks(self, f, session):
        entry = self.pkg.files[f]

        m = re_issource.match(f)
        if not m:
            return

        entry["package"] = m.group(1)
        entry["version"] = m.group(2)
        entry["type"] = m.group(3)

        # Ensure the source package name matches the Source field in the .changes
        if self.pkg.changes["source"] != entry["package"]:
            self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))

        # Ensure the source version matches the version in the .changes file
        if re_is_orig_source.match(f):
            changes_version = self.pkg.changes["chopversion2"]
        else:
            changes_version = self.pkg.changes["chopversion"]

        if changes_version != entry["version"]:
            self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))

        # Ensure the .changes lists source in the Architecture field
        if not self.pkg.changes["architecture"].has_key("source"):
            self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))

        # Check the signature of a .dsc file
        if entry["type"] == "dsc":
            # check_signature returns either:
            #  (None, [list, of, rejects]) or (signature, [])
            (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
            for j in rejects:
                self.rejects.append(j)

        entry["architecture"] = "source"
    def per_suite_file_checks(self, f, suite, session):
        cnf = Config()
        entry = self.pkg.files[f]
        archive = utils.where_am_i()

        # Skip byhand
        if entry.has_key("byhand"):
            return

        # Check we have fields we need to do these checks
        oktogo = True
        for m in ['component', 'package', 'priority', 'size', 'md5sum']:
            if not entry.has_key(m):
                self.rejects.append("file '%s' does not have field %s set" % (f, m))
                oktogo = False

        if not oktogo:
            return

        # Handle component mappings
        for m in cnf.ValueList("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest

        # Ensure the component is valid for the target suite
        if cnf.has_key("Suite::%s::Components" % (suite)) and \
           entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
            self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
            return
        # Validate the component
        if not get_component(entry["component"], session):
            self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
            return

        # See if the package is NEW
        if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
            entry["new"] = 1

        # Validate the priority
        if entry["priority"].find('/') != -1:
            self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))

        # Determine the location
        location = cnf["Dir::Pool"]
        l = get_location(location, entry["component"], archive, session)
        if l is None:
            self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
            entry["location id"] = -1
        else:
            entry["location id"] = l.location_id
        # Check the md5sum & size against existing files (if any)
        entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])

        found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
                                         entry["size"], entry["md5sum"], entry["location id"])

        if found is None:
            self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
        elif found is False and poolfile is not None:
            self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))

        if poolfile is None:
            entry["files id"] = None
        else:
            entry["files id"] = poolfile.file_id

        # Check for packages that have moved from one component to another
        entry['suite'] = suite
        res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
        if res.rowcount > 0:
            entry["othercomponents"] = res.fetchone()[0]
    def check_files(self, action=True):
        archive = utils.where_am_i()
        file_keys = self.pkg.files.keys()
        holding = Holding()
        cnf = Config()

        # XXX: As far as I can tell, this can no longer happen - see
        # comments by AJ in old revisions - mhy
        # if reprocess is 2 we've already done this and we're checking
        # things again for the new .orig.tar.gz.
        # [Yes, I'm fully aware of how disgusting this is]
        if action and self.reprocess < 2:
            cwd = os.getcwd()
            os.chdir(self.pkg.directory)
            for f in file_keys:
                ret = holding.copy_to_holding(f)
                if ret is not None:
                    # XXX: Should we bail out here or try and continue?
                    self.rejects.append(ret)

            os.chdir(cwd)

        # Check there isn't already a .changes or .dak file of the same name in
        # the proposed-updates "CopyChanges" or "CopyDotDak" storage directories.
        # [NB: this check must be done post-suite mapping]
        base_filename = os.path.basename(self.pkg.changes_file)
        dot_dak_filename = base_filename[:-8] + ".dak"

        for suite in self.pkg.changes["distribution"].keys():
            copychanges = "Suite::%s::CopyChanges" % (suite)
            if cnf.has_key(copychanges) and \
               os.path.exists(os.path.join(cnf[copychanges], base_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (base_filename, cnf[copychanges]))

            copy_dot_dak = "Suite::%s::CopyDotDak" % (suite)
            if cnf.has_key(copy_dot_dak) and \
               os.path.exists(os.path.join(cnf[copy_dot_dak], dot_dak_filename)):
                self.rejects.append("%s: a file with this name already exists in %s" \
                                    % (dot_dak_filename, cnf[copy_dot_dak]))

        self.reprocess = 0
        has_binaries = False
        has_source = False
        session = DBConn().session()

        for f, entry in self.pkg.files.items():
            # Ensure the file does not already exist in one of the accepted directories
            for d in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                if not cnf.has_key("Dir::Queue::%s" % (d)): continue
                if os.path.exists(cnf["Dir::Queue::%s" % (d) ] + '/' + f):
                    self.rejects.append("%s file already exists in the %s directory." % (f, d))

            if not re_taint_free.match(f):
                self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))

            # Check the file is readable
            if os.access(f, os.R_OK) == 0:
                # When running in -n, copy_to_holding() won't have
                # generated the reject_message, so we need to.
                if action is True:
                    if os.path.exists(f):
                        self.rejects.append("Can't read `%s'. [permission denied]" % (f))
                    else:
                        self.rejects.append("Can't read `%s'. [file not found]" % (f))
                entry["type"] = "unreadable"
                continue

            # If it's byhand skip remaining checks
            if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
                entry["type"] = "byhand"

            # Checks for a binary package...
            elif re_isadeb.match(f):
                has_binaries = True
                entry["type"] = "deb"

                # This routine appends to self.rejects/warnings as appropriate
                self.binary_file_checks(f, session)

            # Checks for a source package...
            elif re_issource.match(f):
                has_source = True

                # This routine appends to self.rejects/warnings as appropriate
                self.source_file_checks(f, session)

            # Not a binary or source package?  Assume byhand...
            else:
                entry["type"] = "byhand"

            # Per-suite file checks
            entry["oldfiles"] = {}
            for suite in self.pkg.changes["distribution"].keys():
                self.per_suite_file_checks(f, suite, session)

        session.close()

        # If the .changes file says it has source, it must have source.
        if self.pkg.changes["architecture"].has_key("source"):
            if not has_source:
                self.rejects.append("no source found, but the Architecture field in the changes file mentions source.")

            if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
                self.rejects.append("source only uploads are not supported.")
    ###########################################################################
    def check_dsc(self, action=True, session=None):
        """Returns bool indicating whether or not the source changes are valid"""
        # Ensure there is source to check
        if not self.pkg.changes["architecture"].has_key("source"):
            return True

        # Find the .dsc
        dsc_filename = None
        for f, entry in self.pkg.files.items():
            if entry["type"] == "dsc":
                if dsc_filename:
                    self.rejects.append("can not process a .changes file with multiple .dsc's.")
                    return False
                else:
                    dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            self.rejects.append("source uploads must contain a dsc file")
            return False

        # Parse the .dsc file
        try:
            self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
        except CantOpenError:
            # if not -n copy_to_holding() will have done this for us...
            if not action:
                self.rejects.append("%s: can't read file." % (dsc_filename))
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
        except InvalidDscError, line:
            self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
        except ChangesUnicodeError:
            self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))

        # Build up the file list of files mentioned by the .dsc
        try:
            self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
        except NoFilesFieldError:
            self.rejects.append("%s: no Files: field." % (dsc_filename))
            return False
        except UnknownFormatError, format:
            self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
            return False
        except ParseChangesError, line:
            self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
            return False
        # Enforce mandatory fields
        for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
            if not self.pkg.dsc.has_key(i):
                self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
                return False

        # Validate the source and version fields
        if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
            self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
        if not re_valid_version.match(self.pkg.dsc["version"]):
            self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))

        # Only a limited list of source formats are allowed in each suite
        for dist in self.pkg.changes["distribution"].keys():
            allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
            if self.pkg.dsc["format"] not in allowed:
                self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))

        # Validate the Maintainer field
        try:
            # We ignore the return value
            fix_maintainer(self.pkg.dsc["maintainer"])
        except ParseMaintError, msg:
            self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
                                % (dsc_filename, self.pkg.dsc["maintainer"], msg))

        # Validate the build-depends field(s)
        for field_name in [ "build-depends", "build-depends-indep" ]:
            field = self.pkg.dsc.get(field_name)
            if field:
                # Have apt try to parse them...
                try:
                    apt_pkg.ParseSrcDepends(field)
                except:
                    self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))

        # Ensure the version number in the .dsc matches the version number in the .changes
        epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        changes_version = self.pkg.files[dsc_filename]["version"]

        if epochless_dsc_version != changes_version:
            self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))

        # Ensure the Files field contain only what's expected
        self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))

        # Ensure source is newer than existing source in target suites
        session = DBConn().session()
        self.check_source_against_db(dsc_filename, session)
        self.check_dsc_against_db(dsc_filename, session)
        session.close()

        return True
    ###########################################################################

    def get_changelog_versions(self, source_dir):
        """Extracts the source package and (optionally) grabs the
        version history out of debian/changelog for the BTS."""

        cnf = Config()

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
        if not dsc_filename:
            return

        # Create a symlink mirror of the source files in our temporary directory
        for f in self.pkg.files.keys():
            m = re_issource.match(f)
            if m:
                src = os.path.join(source_dir, f)
                # If a file is missing for whatever reason, give up.
                if not os.path.exists(src):
                    return

                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
                   self.pkg.orig_files[f].has_key("path"):
                    continue

                dest = os.path.join(os.getcwd(), f)
                os.symlink(src, dest)

        # If the orig files are not a part of the upload, create symlinks to the
        # existing copies.
        for orig_file in self.pkg.orig_files.keys():
            if not self.pkg.orig_files[orig_file].has_key("path"):
                continue
            dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
            os.symlink(self.pkg.orig_files[orig_file]["path"], dest)

        # Extract the source
        cmd = "dpkg-source -sn -x %s" % (dsc_filename)
        (result, output) = commands.getstatusoutput(cmd)
        if result != 0:
            self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
            return

        if not cnf.Find("Dir::Queue::BTSVersionTrack"):
            return

        # Get the upstream version
        upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
        if re_strip_revision.search(upstr_version):
            upstr_version = re_strip_revision.sub('', upstr_version)

        # Ensure the changelog file exists
        changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
        if not os.path.exists(changelog_filename):
            self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
            return

        # Parse the changelog
        self.pkg.dsc["bts changelog"] = ""
        changelog_file = utils.open_file(changelog_filename)
        for line in changelog_file.readlines():
            m = re_changelog_versions.match(line)
            if m:
                self.pkg.dsc["bts changelog"] += line
        changelog_file.close()

        # Check we found at least one revision in the changelog
        if not self.pkg.dsc["bts changelog"]:
            self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
    def check_source(self):
        # XXX: I'm fairly sure reprocess == 2 can never happen
        #      AJT disabled the is_incoming check years ago - mhy
        #      We should probably scrap or rethink the whole reprocess thing
        # Bail out if:
        #    a) there's no source
        # or b) reprocess is 2 - we will do this check next time when orig
        #       tarball is in 'files'
        # or c) the orig files are MIA
        if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
           or len(self.pkg.orig_files) == 0:
            return

        tmpdir = utils.temp_dirname()

        # Move into the temporary directory
        cwd = os.getcwd()
        os.chdir(tmpdir)

        # Get the changelog version history
        self.get_changelog_versions(cwd)

        # Move back and cleanup the temporary tree
        os.chdir(cwd)

        try:
            shutil.rmtree(tmpdir)
        except OSError, e:
            if e.errno != errno.EACCES:
                utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))

            self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
            # We probably have u-r or u-w directories so chmod everything
            # and try again.
            cmd = "chmod -R u+rwx %s" % (tmpdir)
            result = os.system(cmd)
            if result != 0:
                utils.fubar("'%s' failed with result %s." % (cmd, result))
            shutil.rmtree(tmpdir)
        except Exception, e:
            print "foobar2 (%s)" % e
            utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
    ###########################################################################

    def ensure_hashes(self):
        # Make sure we recognise the format of the Files: field in the .changes
        format = self.pkg.changes.get("format", "0.0").split(".", 1)
        if len(format) == 2:
            format = int(format[0]), int(format[1])
        else:
            format = int(float(format[0])), 0
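        # An illustration with hypothetical values: "1.8".split(".", 1) gives
        # ["1", "8"] -> (1, 8), while a bare "1" falls back to (1, 0).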
        # We need to deal with the original changes blob, as the fields we need
        # might not be in the changes dict serialised into the .dak anymore.
        orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])

        # Copy the checksums over to the current changes dict. This will keep
        # the existing modifications to it intact.
        for field in orig_changes:
            if field.startswith('checksums-'):
                self.pkg.changes[field] = orig_changes[field]

        # Check for unsupported hashes
        for j in utils.check_hash_fields(".changes", self.pkg.changes):
            self.rejects.append(j)

        for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
            self.rejects.append(j)

        # We have to calculate the hash if we have an earlier changes version than
        # the hash appears in rather than require it exist in the changes file
        for hashname, hashfunc, version in utils.known_hashes:
            # TODO: Move _ensure_changes_hash into this class
            for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
                self.rejects.append(j)
            if "source" in self.pkg.changes["architecture"]:
                # TODO: Move _ensure_dsc_hash into this class
                for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
                    self.rejects.append(j)
    def check_hashes(self):
        for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".changes", self.pkg.files):
            self.rejects.append(m)

        for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
            self.rejects.append(m)

        for m in utils.check_size(".dsc", self.pkg.dsc_files):
            self.rejects.append(m)

        self.ensure_hashes()
    ###########################################################################

    def ensure_orig(self, target_dir='.', session=None):
        """
        Ensures that all orig files mentioned in the changes file are present
        in target_dir. If they do not exist, they are symlinked into place.

        A list containing the symlinks that were created is returned (so they
        can be removed).
        """

        symlinked = []
        cnf = Config()

        for filename, entry in self.pkg.dsc_files.iteritems():
            if not re_is_orig_source.match(filename):
                # File is not an orig; ignore
                continue

            if os.path.exists(filename):
                # File exists, no need to continue
                continue

            def symlink_if_valid(path):
                f = utils.open_file(path)
                md5sum = apt_pkg.md5sum(f)
                f.close()

                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
                expected = (int(entry['size']), entry['md5sum'])

                if fingerprint != expected:
                    return False

                dest = os.path.join(target_dir, filename)
                os.symlink(path, dest)
                symlinked.append(dest)
                return True

            session_ = session
            if session is None:
                session_ = DBConn().session()

            found = False

            # Look in the pool
            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
                poolfile_path = os.path.join(
                    poolfile.location.path, poolfile.filename
                )

                if symlink_if_valid(poolfile_path):
                    found = True
                    break

            if session is None:
                session_.close()

            if found:
                continue

            # Look in some other queues for the file
            queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')

            for queue in queues:
                if not cnf.get('Dir::Queue::%s' % queue):
                    continue

                queuefile_path = os.path.join(
                    cnf['Dir::Queue::%s' % queue], filename
                )

                if not os.path.exists(queuefile_path):
                    # Does not exist in this queue
                    continue

                if symlink_if_valid(queuefile_path):
                    break

        return symlinked
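    # A minimal usage sketch (run from the directory holding the upload, as
    # check_lintian() below does):
    #
    #   symlinked = u.ensure_orig()
    #   ... run tools that need the orig tarballs ...
    #   for path in symlinked:
    #       os.unlink(path)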
    ###########################################################################

    def check_lintian(self):
        cnf = Config()

        # Don't reject binary uploads
        if not self.pkg.changes['architecture'].has_key('source'):
            return

        # Only check some distributions
        valid_dist = False
        for dist in ('unstable', 'experimental'):
            if dist in self.pkg.changes['distribution']:
                valid_dist = True
                break

        if not valid_dist:
            return

        tagfile = cnf.get("Dinstall::LintianTags")
        if tagfile is None:
            # We don't have a tagfile, so just don't do anything.
            return

        # Parse the yaml file
        sourcefile = file(tagfile, 'r')
        sourcecontent = sourcefile.read()
        sourcefile.close()
        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError, msg:
            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
            return

        # Try and find all orig mentioned in the .dsc
        symlinked = self.ensure_orig()

        # Now setup the input file for lintian. lintian wants "one tag per line" only,
        # so put it together like that. We put all types of tags in one file and then
        # sort through lintian's output later to see whether it's a fatal tag we
        # detected, or not. So we only run lintian once on all tags, even if we
        # might reject on some, but not on others.
        # Additionally build up a set of tags
        tags = set()
        (fd, temp_filename) = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tagtype in lintiantags:
            for tag in lintiantags[tagtype]:
                temptagfile.write("%s\n" % tag)
                tags.add(tag)
        temptagfile.close()

        # So now we should run lintian on the .changes file, capturing output
        # to then parse it.
        command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
        (result, output) = commands.getstatusoutput(command)

        # We are done with lintian, remove our tempfile and any symlinks we created
        os.unlink(temp_filename)
        for symlink in symlinked:
            os.unlink(symlink)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
            utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))

        if len(output) == 0:
            return

        def log(*txt):
            if self.logger:
                self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))

        # We have output from lintian, so this package isn't clean. Let's parse it
        # and see if we have a candidate for a reject. Example line:
        # W: tzdata: binary-without-manpage usr/sbin/tzconfig
        for line in output.split('\n'):
            m = re_parse_lintian.match(line)
            if m is None:
                continue

            etype = m.group(1)
            epackage = m.group(2)
            etag = m.group(3)
            etext = m.group(4)

            # So let's check if we know the tag at all.
            if etag not in tags:
                continue

            if etype == 'O':
                # We know it and it is overridden. Check that the override is allowed.
                if etag in lintiantags['warning']:
                    # The tag is overridden, and it is allowed to be overridden.
                    # Don't add a reject message.
                    pass
                elif etag in lintiantags['error']:
                    # The tag is overridden - but is not allowed to be
                    self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
                    log("ftpmaster does not allow tag to be overridable", etag)
            else:
                # Tag is known and not overridden, direct reject.
                self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
                # Now tell if they *might* override it.
                if etag in lintiantags['warning']:
                    log("auto rejecting", "overridable", etag)
                    self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
                else:
                    log("auto rejecting", "not overridable", etag)
    ###########################################################################
    def check_urgency(self):
        cnf = Config()
        if self.pkg.changes["architecture"].has_key("source"):
            if not self.pkg.changes.has_key("urgency"):
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
            self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
            if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
                self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
                                     (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
                self.pkg.changes["urgency"] = cnf["Urgency::Default"]
    ###########################################################################

    # Sanity check the time stamps of files inside debs.
    # [Files in the near future cause ugly warnings and extreme time
    #  travel can cause errors on extraction]

    def check_timestamps(self):
        Cnf = Config()

        future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
        past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"], "%Y"))
        tar = TarTime(future_cutoff, past_cutoff)

        for filename, entry in self.pkg.files.items():
            if entry["type"] == "deb":
                tar.reset()
                try:
                    deb_file = utils.open_file(filename)
                    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
                    deb_file.seek(0)
                    try:
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
                    except SystemError, e:
                        # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
                        if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
                            raise
                        deb_file.seek(0)
                        apt_inst.debExtract(deb_file, tar.callback, "data.tar.bz2")

                    deb_file.close()

                    future_files = tar.future_files.keys()
                    if future_files:
                        num_future_files = len(future_files)
                        future_file = future_files[0]
                        future_date = tar.future_files[future_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
                                            % (filename, num_future_files, future_file, time.ctime(future_date)))

                    ancient_files = tar.ancient_files.keys()
                    if ancient_files:
                        num_ancient_files = len(ancient_files)
                        ancient_file = ancient_files[0]
                        ancient_date = tar.ancient_files[ancient_file]
                        self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
                                            % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
                except:
                    self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
    ###########################################################################

    def check_transition(self, session):
        cnf = Config()

        sourcepkg = self.pkg.changes["source"]

        # No sourceful upload -> no need to do anything else, direct return
        # We also work with unstable uploads, not experimental or those going to some
        # proposed-updates queue
        if "source" not in self.pkg.changes["architecture"] or \
           "unstable" not in self.pkg.changes["distribution"]:
            return

        # Also only check if there is a file defined (and existent) with
        # checks.
        transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
        if transpath == "" or not os.path.exists(transpath):
            return

        # Parse the yaml file
        sourcefile = file(transpath, 'r')
        sourcecontent = sourcefile.read()
        try:
            transitions = yaml.load(sourcecontent)
        except yaml.YAMLError, msg:
            # This shouldn't happen, there is a wrapper to edit the file which
            # checks it, but we prefer to be safe rather than end up rejecting
            # everything.
            utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
            return

        # Now look through all defined transitions
        for trans in transitions:
            t = transitions[trans]
            source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.VersionCompare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if sourcepkg in t['packages']:
                    # The source is affected, let's reject it.

                    rejectmsg = "%s: part of the %s transition.\n\n" % (
                        sourcepkg, trans)

                    if current is not None:
                        currentlymsg = "at version %s" % (current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: %s\n\n" % (t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get %s migrated (it is
currently %s, we need version %s).  This transition is managed by the
Release Team, and %s is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact %s directly if you
need further assistance.  You might want to upload to experimental until this
transition is done."""
                            % (source, currentlymsg, expected, t["rm"], t["rm"])))

                    self.rejects.append(rejectmsg)
                    return
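    # A hypothetical transitions YAML entry using the keys read above
    # ("source", "new", "rm", "reason", "packages"); values are examples only:
    #
    #   apt_transition:
    #     source: apt
    #     new: 0.7.21
    #     rm: Some Releaseteam Member
    #     reason: "the new apt needs to migrate to testing first"
    #     packages:
    #       - apt
    #       - python-apt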
    ###########################################################################

    def check_signed_by_key(self):
        """Ensure the .changes is signed by an authorized uploader."""
        session = DBConn().session()

        self.check_transition(session)

        (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)

        # match claimed name with actual name:
        if uid is None:
            # This is fundamentally broken but need us to refactor how we get
            # the UIDs/Fingerprints in order for us to fix it properly
            uid, uid_email = self.pkg.changes["fingerprint"], uid
            may_nmu, may_sponsor = 1, 1
            # XXX by default new dds don't have a fingerprint/uid in the db atm,
            #     and can't get one in there if we don't allow nmu/sponsorship
        elif is_dm is False:
            # If is_dm is False, we allow full upload rights
            uid_email = "%s@debian.org" % (uid)
            may_nmu, may_sponsor = 1, 1
        else:
            # Assume limited upload rights unless we've discovered otherwise
            uid_email = uid
            may_nmu, may_sponsor = 0, 0

        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = 0
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = 0
            if uid_name == "": sponsored = 1
        else:
            sponsored = 1
            if ("source" in self.pkg.changes["architecture"] and
                uid_email and utils.is_email_alias(uid_email)):
                sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        if sponsored and not may_sponsor:
            self.rejects.append("%s is not authorised to sponsor uploads" % (uid))

        if not sponsored and not may_nmu:
            should_reject = True
            highest_sid, highest_version = None, None

            # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
            #      It ignores higher versions with the dm_upload_allowed flag set to false
            #      I'm keeping the existing behaviour for now until I've gone back and
            #      checked exactly what the GR says - mhy
            for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
                if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
                    highest_sid = si.source_id
                    highest_version = si.version

            if highest_sid is None:
                self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
            else:
                for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
                    (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
                    if email == uid_email or name == uid_name:
                        should_reject = False
                        break

            if should_reject is True:
                self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))

            for b in self.pkg.changes["binary"].keys():
                for suite in self.pkg.changes["distribution"].keys():
                    q = session.query(DBSource)
                    q = q.join(DBBinary).filter_by(package=b)
                    q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)

                    for s in q.all():
                        if s.source != self.pkg.changes["source"]:
                            self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))

            for f in self.pkg.files.keys():
                if self.pkg.files[f].has_key("byhand"):
                    self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
                if self.pkg.files[f].has_key("new"):
                    self.rejects.append("%s may not upload NEW file %s" % (uid, f))

        session.close()
    ###########################################################################
    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        if byhand or new:
            summary += "Changes: " + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        summary += self.announce(short_summary, 0)

        return (summary, short_summary)
    ###########################################################################

    def close_bugs(self, summary, action):
        """
        Send mail to close bugs as instructed by the closes field in the changes file.
        Also add a line to summary if any work was done.

        @type summary: string
        @param summary: summary text, as given by L{build_summaries}

        @type action: bool
        @param action: If set to false, no real action will be taken.

        @rtype: string
        @return: summary. If action was taken, extended by the list of closed bugs.

        """

        template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')

        bugs = self.pkg.changes["closes"].keys()

        if not bugs:
            return summary

        bugs.sort()
        summary += "Closing bugs: "
        for bug in bugs:
            summary += "%s " % (bug)
            if action:
                self.update_subst()
                self.Subst["__BUG_NUMBER__"] = bug
                if self.pkg.changes["distribution"].has_key("stable"):
                    self.Subst["__STABLE_WARNING__"] = """
Note that this package is not part of the released stable Debian
distribution.  It may have dependencies on other unreleased software,
or other instabilities.  Please take care if you wish to install it.
The update will eventually make its way into the next released Debian
distribution."""
                else:
                    self.Subst["__STABLE_WARNING__"] = ""
                mail_message = utils.TemplateSubst(self.Subst, template)
                utils.send_mail(mail_message)

                # Clear up after ourselves
                del self.Subst["__BUG_NUMBER__"]
                del self.Subst["__STABLE_WARNING__"]

        if action and self.logger:
            self.logger.log(["closing bugs"] + bugs)

        summary += "\n"

        return summary

    ###########################################################################

    def announce(self, short_summary, action):
        """
        Send an announce mail about a new upload.

        @type short_summary: string
        @param short_summary: Short summary text to include in the mail

        @type action: bool
        @param action: if False, no real action will be taken

        @rtype: string
        @return: Textstring about action taken.
        """

        cnf = Config()
        announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')

        # Only do announcements for source uploads with a recent dpkg-dev installed
        if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
           self.pkg.changes["architecture"].has_key("source"):
            return ""

        lists_done = {}
        summary = ""

        self.Subst["__SHORT_SUMMARY__"] = short_summary

        for dist in self.pkg.changes["distribution"].keys():
            announce_list = cnf.Find("Suite::%s::Announce" % (dist))
            if announce_list == "" or lists_done.has_key(announce_list):
                continue

            lists_done[announce_list] = 1
            summary += "Announcing to %s\n" % (announce_list)

            if action:
                self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
                if cnf.get("Dinstall::TrackingServer") and \
                   self.pkg.changes["architecture"].has_key("source"):
                    trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
                    self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto

                mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
                utils.send_mail(mail_message)

                del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]

        if cnf.FindB("Dinstall::CloseBugs"):
            summary = self.close_bugs(summary, action)

        del self.Subst["__SHORT_SUMMARY__"]

        return summary
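
    # A rough sketch of the configuration consulted above (hypothetical
    # values, not from a real dak.conf):
    #   Suite::unstable::Announce "debian-devel-changes@lists.debian.org";
    #   Dinstall::TrackingServer "packages.qa.debian.org";
    # With that, a sourceful upload to unstable is announced to the list and
    # Bcc'd to <source>@packages.qa.debian.org.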

    ###########################################################################

    def accept (self, summary, short_summary, targetdir=None):
        """
        Accept an upload.

        This moves all files referenced from the .changes into the I{accepted}
        queue, sends the accepted mail, announces to lists, closes bugs and
        also checks for override disparities. If enabled it will write out
        the version history for the BTS Version Tracking and will finally call
        the autobuild queue upload routine.

        @type summary: string
        @param summary: Summary text

        @type short_summary: string
        @param short_summary: Short summary
        """

        cnf = Config()
        stats = SummaryStats()

        accepttemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted')

        if targetdir is None:
            targetdir = cnf["Dir::Queue::Accepted"]

        print "Accepting."
        if self.logger:
            self.logger.log(["Accepting changes", self.pkg.changes_file])

        self.pkg.write_dot_dak(targetdir)

        # Move all the files into the accepted directory
        utils.move(self.pkg.changes_file, targetdir)

        for name, entry in sorted(self.pkg.files.items()):
            utils.move(name, targetdir)
            stats.accept_bytes += float(entry["size"])

        stats.accept_count += 1

        # Send accept mail, announce to lists, close bugs and check for
        # override disparities
        if not cnf["Dinstall::Options::No-Mail"]:
            self.Subst["__SUITE__"] = ""
            self.Subst["__SUMMARY__"] = summary
            mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
            utils.send_mail(mail_message)
            self.announce(short_summary, 1)

        ## Helper stuff for DebBugs Version Tracking
        if cnf.Find("Dir::Queue::BTSVersionTrack"):
            # ??? once queue/* is cleared on *.d.o and/or reprocessed
            # the conditionalization on dsc["bts changelog"] should be
            # dropped.

            # Write out the version history from the changelog
            if self.pkg.changes["architecture"].has_key("source") and \
               self.pkg.dsc.has_key("bts changelog"):

                (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
                version_history = os.fdopen(fd, 'w')
                version_history.write(self.pkg.dsc["bts changelog"])
                version_history.close()
                filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                      self.pkg.changes_file[:-8]+".versions")
                os.rename(temp_filename, filename)
                os.chmod(filename, 0644)
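
            # The history file is named after the .changes: e.g. (hypothetical)
            # "foo_1.2-1_amd64.changes" becomes "foo_1.2-1_amd64.versions"
            # under Dir::Queue::BTSVersionTrack ([:-8] strips ".changes").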

            # Write out the binary -> source mapping.
            (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
            debinfo = os.fdopen(fd, 'w')
            for name, entry in sorted(self.pkg.files.items()):
                if entry["type"] == "deb":
                    line = " ".join([entry["package"], entry["version"],
                                     entry["architecture"], entry["source package"],
                                     entry["source version"]])
                    debinfo.write(line+"\n")
            debinfo.close()
            filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
                                  self.pkg.changes_file[:-8]+".debinfo")
            os.rename(temp_filename, filename)
            os.chmod(filename, 0644)
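
            # Each .debinfo line is "package version architecture sourcepackage
            # sourceversion", e.g. (hypothetical): "foo 1.2-1 amd64 foo 1.2-1".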

        # It is Cnf["Dir::Queue::Accepted"] here, not targetdir!
        # <Ganneff> we do call queue_build too
        # <mhy> well yes, we'd have had to if we were inserting into accepted
        # <Ganneff> now. thats database only.
        # <mhy> urgh, that's going to get messy
        # <Ganneff> so i make the p-n call to it *also* using accepted/
        # <mhy> but then the packages will be in the queue_build table without the files being there
        # <Ganneff> as the buildd queue is only regenerated whenever unchecked runs
        # <mhy> ah, good point
        # <Ganneff> so it will work out, as unchecked move it over
        # <mhy> that's all completely sick

        # This routine returns None on success or an error on failure
        res = get_or_set_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
        if res:
            utils.fubar(res)

    ###########################################################################

    def check_override(self):
        """
        Checks override entries for validity. Mails "Override disparity" warnings,
        if that feature is enabled.

        Abandons the check if
            - override disparity checks are disabled
            - mail sending is disabled
        """

        cnf = Config()

        # Abandon the check if:
        #  a) override disparity checks have been disabled
        #  b) we're not sending mail
        if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
           cnf["Dinstall::Options::No-Mail"]:
            return

        summary = self.pkg.check_override()
        if summary == "":
            return

        overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')

        self.Subst["__SUMMARY__"] = summary
        mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
        utils.send_mail(mail_message)
        del self.Subst["__SUMMARY__"]

    ###########################################################################

    def remove(self, dir=None):
        """
        Used (for instance) in p-u to remove the package from unchecked
        """
        if dir is None:
            os.chdir(self.pkg.directory)
        else:
            os.chdir(dir)

        for f in self.pkg.files.keys():
            os.unlink(f)
        os.unlink(self.pkg.changes_file)

    ###########################################################################

    def move_to_dir (self, dest, perms=0660, changesperms=0664):
        """
        Move files to dest with certain perms/changesperms
        """
        utils.move(self.pkg.changes_file, dest, perms=changesperms)
        for f in self.pkg.files.keys():
            utils.move(f, dest, perms=perms)
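
    # The default modes are octal literals: 0660 keeps package files
    # group-writable, 0664 additionally leaves the .changes world-readable.
    # A typical (hypothetical) call: upload.move_to_dir(cnf["Dir::Queue::Done"])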

    ###########################################################################

    def force_reject(self, reject_files):
        """
        Forcefully move files from the current directory to the
        reject directory.  If any file already exists in the reject
        directory it will be moved to the morgue to make way for
        the new file.

        @type reject_files: list
        @param reject_files: names of the files to move
        """

        cnf = Config()

        for file_entry in reject_files:
            # Skip any files which don't exist or which we don't have permission to copy.
            if os.access(file_entry, os.R_OK) == 0:
                continue

            dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)

            try:
                dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
            except OSError, e:
                # File exists?  Let's try and move it to the morgue
                if e.errno == errno.EEXIST:
                    morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
                    try:
                        morgue_file = utils.find_next_free(morgue_file)
                    except NoFreeFilenameError:
                        # Something's either gone badly Pete Tong, or
                        # someone is trying to exploit us.
                        utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
                        return
                    utils.move(dest_file, morgue_file, perms=0660)
                    try:
                        dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
                    except OSError, e:
                        # Likewise
                        utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
                        return
                else:
                    raise

            # If we got here, we own the destination file, so we can
            # safely overwrite it.
            utils.move(file_entry, dest_file, 1, perms=0660)
            os.close(dest_fd)
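
        # Note: the O_CREAT|O_EXCL open is what makes claiming the destination
        # atomic - only one process can create the file, so concurrent rejects
        # cannot silently clobber each other's copies.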

    ###########################################################################

    def do_reject (self, manual=0, reject_message="", note=""):
        """
        Reject an upload. If called without a reject message or C{manual} is
        true, spawn an editor so the user can write one.

        @type manual: bool
        @param manual: manual or automated rejection

        @type reject_message: string
        @param reject_message: A reject message

        @return: 0
        """
        # If we weren't given a manual rejection message, spawn an
        # editor so the user can add one in...
        if manual and not reject_message:
            (fd, temp_filename) = utils.temp_filename()
            temp_file = os.fdopen(fd, 'w')
            if len(note) > 0:
                for line in note:
                    temp_file.write(line)
            temp_file.close()
            editor = os.environ.get("EDITOR","vi")
            answer = 'E'
            while answer == 'E':
                os.system("%s %s" % (editor, temp_filename))
                temp_fh = utils.open_file(temp_filename)
                reject_message = "".join(temp_fh.readlines())
                temp_fh.close()
                print "Reject message:"
                print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
                prompt = "[R]eject, Edit, Abandon, Quit ?"
                answer = "XXX"
                while prompt.find(answer) == -1:
                    answer = utils.our_raw_input(prompt)
                    m = re_default_answer.search(prompt)
                    if answer == "":
                        answer = m.group(1)
                    answer = answer[:1].upper()
            os.unlink(temp_filename)
            if answer == 'A':
                return 1
            elif answer == 'Q':
                sys.exit(0)

        print "Rejecting.\n"

        cnf = Config()

        reason_filename = self.pkg.changes_file[:-8] + ".reason"
        reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # Move all the files into the reject directory
        reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
        self.force_reject(reject_files)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reason_filename):
            os.unlink(reason_filename)
        reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)

        rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")

        if not manual:
            self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
            os.write(reason_fd, reject_message)
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
        else:
            # Build up the rejection email
            user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
            self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
            self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
            self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
            reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
            # Write the rejection email out as the <foo>.reason file
            os.write(reason_fd, reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__MANUAL_REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

        os.close(reason_fd)

        # Send the rejection mail if appropriate
        if not cnf["Dinstall::Options::No-Mail"]:
            utils.send_mail(reject_mail_message)

        if self.logger:
            self.logger.log(["rejected", self.pkg.changes_file])

        return 0
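
    # The interactive loop above matches the typed letter against the prompt
    # "[R]eject, Edit, Abandon, Quit ?": "E" re-runs $EDITOR, "A" abandons
    # (returns 1), "Q" exits, and an empty answer takes the bracketed
    # default "R" via re_default_answer.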

    ################################################################################

    def in_override_p(self, package, component, suite, binary_type, file, session):
        """
        Check if a package already has override entries in the DB

        @type package: string
        @param package: package name

        @type component: string
        @param component: component name

        @type suite: string
        @param suite: suite name

        @type binary_type: string
        @param binary_type: type of the package

        @type file: string
        @param file: filename we check

        @return: the database result. But no one cares anyway.
        """

        cnf = Config()

        if binary_type == "": # must be source
            file_type = "dsc"
        else:
            file_type = binary_type

        # Override suite name; used for example with proposed-updates
        if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
            suite = cnf["Suite::%s::OverrideSuite" % (suite)]

        result = get_override(package, suite, component, file_type, session)

        # If checking for a source package fall back on the binary override type
        if file_type == "dsc" and len(result) < 1:
            result = get_override(package, suite, component, ['deb', 'udeb'], session)

        # Remember the section and priority so we can check them later if appropriate
        if len(result) > 0:
            result = result[0]
            self.pkg.files[file]["override section"] = result.section.section
            self.pkg.files[file]["override priority"] = result.priority.priority

        return result

    ################################################################################

    def get_anyversion(self, sv_list, suite):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type suite: string
        @param suite: suite name

        @return: highest version found for C{suite} or any suite it enhances
        """
        Cnf = Config()
        anyversion = None
        anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
        for (s, v) in sv_list:
            if s in [ x.lower() for x in anysuite ]:
                if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
                    anyversion = v

        return anyversion
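
    # A rough sketch of what this computes (hypothetical data and config):
    #   sv_list = [("testing", "1.0-1"), ("unstable", "1.2-1")]
    #   get_anyversion(sv_list, "testing")   # -> "1.0-1"
    # but if Suite::testing::VersionChecks::Enhances lists "unstable", both
    # tuples are considered and "1.2-1" is returned.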

    ################################################################################

    def cross_suite_version_check(self, sv_list, file, new_version, sourceful=False):
        """
        @type sv_list: list
        @param sv_list: list of (suite, version) tuples to check

        @type file: string
        @param file: filename of the package being checked

        @type new_version: string
        @param new_version: version of the uploaded package

        Ensure versions are newer than existing packages in target
        suites and that cross-suite version checking rules as
        set out in the conf file are satisfied.
        """

        cnf = Config()

        # Check versions for each target suite
        for target_suite in self.pkg.changes["distribution"].keys():
            must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
            must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]

            # Enforce "must be newer than target suite" even if conffile omits it
            if target_suite not in must_be_newer_than:
                must_be_newer_than.append(target_suite)

            for (suite, existent_version) in sv_list:
                vercmp = apt_pkg.VersionCompare(new_version, existent_version)

                if suite in must_be_newer_than and sourceful and vercmp < 1:
                    self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))

                if suite in must_be_older_than and vercmp > -1:
                    cansave = 0

                    if self.pkg.changes.get('distribution-version', {}).has_key(suite):
                        # we really use the other suite, ignoring the conflicting one ...
                        addsuite = self.pkg.changes["distribution-version"][suite]

                        add_version = self.get_anyversion(sv_list, addsuite)
                        target_version = self.get_anyversion(sv_list, target_suite)

                        if not add_version:
                            # not add_version can only happen if we map to a suite
                            # that doesn't enhance the suite we're propup'ing from.
                            # so "propup-ver x a b c; map a d" is a problem only if
                            # d doesn't enhance a.
                            #
                            # i think we could always propagate in this case, rather
                            # than complaining. either way, this isn't a REJECT issue
                            #
                            # And - we really should complain to the dorks who configured dak
                            self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1
                        elif not target_version:
                            # not target_version is true when the package is NEW
                            # we could just stick with the "...old version..." REJECT
                            # for this, I think.
                            self.rejects.append("Won't propagate NEW packages.")
                        elif apt_pkg.VersionCompare(new_version, add_version) < 0:
                            # propagation would be redundant. no need to reject though.
                            self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
                            cansave = 1
                        elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
                             apt_pkg.VersionCompare(add_version, target_version) >= 0:
                            # propagate!
                            self.warnings.append("Propagating upload to %s" % (addsuite))
                            self.pkg.changes.setdefault("propdistribution", {})
                            self.pkg.changes["propdistribution"][addsuite] = 1
                            cansave = 1

                    if not cansave:
                        self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
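
    # For illustration, a hypothetical dak.conf fragment:
    #   Suite::testing-proposed-updates::VersionChecks {
    #     MustBeNewerThan { "stable"; };
    #     MustBeOlderThan { "unstable"; };
    #   };
    # An upload aimed there must be strictly newer than stable's version and
    # strictly older than unstable's, or it is rejected (or, given a
    # distribution-version mapping, propagated as above).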

    ################################################################################

    def check_binary_against_db(self, file, session):
        # Ensure version is sane
        q = session.query(BinAssociation)
        q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[file]["package"])
        q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[file]["architecture"], 'all']))

        self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
                                       file, self.pkg.files[file]["version"], sourceful=False)

        # Check for any existing copies of the file
        q = session.query(DBBinary).filter_by(package=self.pkg.files[file]["package"])
        q = q.filter_by(version=self.pkg.files[file]["version"])
        q = q.join(Architecture).filter_by(arch_string=self.pkg.files[file]["architecture"])

        if q.count() > 0:
            self.rejects.append("%s: can not overwrite existing copy already in the archive." % (file))

    ################################################################################

    def check_source_against_db(self, file, session):
        source = self.pkg.dsc.get("source")
        version = self.pkg.dsc.get("version")

        # Ensure version is sane
        q = session.query(SrcAssociation)
        q = q.join(DBSource).filter(DBSource.source==source)

        self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
                                       file, version, sourceful=True)

    ################################################################################

    def check_dsc_against_db(self, file, session):
        """
        @warning: NB: this function can remove entries from the 'files' index [if
        the orig tarball is a duplicate of the one in the archive]; if
        you're iterating over 'files' and call this function as part of
        the loop, be sure to add a check to the top of the loop to
        ensure you haven't just tried to dereference the deleted entry.
        """

        Cnf = Config()
        self.pkg.orig_files = {} # XXX: do we need to clear it?
        orig_files = self.pkg.orig_files

        # Try and find all files mentioned in the .dsc.  This has
        # to work harder to cope with the multiple possible
        # locations of an .orig.tar.gz.
        # The ordering on the select is needed to pick the newest orig
        # when it exists in multiple places.
        for dsc_name, dsc_entry in self.pkg.dsc_files.items():
            found = None
            if self.pkg.files.has_key(dsc_name):
                actual_md5 = self.pkg.files[dsc_name]["md5sum"]
                actual_size = int(self.pkg.files[dsc_name]["size"])
                found = "%s in incoming" % (dsc_name)

                # Check the file does not already exist in the archive
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                for i in ql:
                    if not i.filename.endswith(dsc_name):
                        ql.remove(i)

                # "[dak] has not broken them.  [dak] has fixed a
                # brokenness.  Your crappy hack exploited a bug in
                # the old dak.
                #
                # "(Come on!  I thought it was always obvious that
                # one just doesn't release different files with
                # the same name and version.)"
                #                        -- ajk@ on d-devel@l.d.o

                if len(ql) > 0:
                    # Ignore exact matches for .orig.tar.gz
                    match = 0
                    if re_is_orig_source.match(dsc_name):
                        for i in ql:
                            if self.pkg.files.has_key(dsc_name) and \
                               int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
                               self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
                                self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
                                # TODO: Don't delete the entry, just mark it as not needed
                                # This would fix the stupidity of changing something we often iterate over
                                # whilst we're doing it
                                del self.pkg.files[dsc_name]
                                if not orig_files.has_key(dsc_name):
                                    orig_files[dsc_name] = {}
                                orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
                                match = 1

                    if not match:
                        self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))

            elif re_is_orig_source.match(dsc_name):
                # Check in the pool
                ql = get_poolfile_like_name(dsc_name, session)

                # Strip out anything that isn't '%s' or '/%s$'
                # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
                for i in ql:
                    if not i.filename.endswith(dsc_name):
                        ql.remove(i)

                if len(ql) > 0:
                    # Unfortunately, we may get more than one match here if,
                    # for example, the package was in potato but had an -sa
                    # upload in woody.  So we need to choose the right one.

                    # default to something sane in case we don't match any or have only one
                    x = ql[0]

                    if len(ql) > 1:
                        for i in ql:
                            old_file = os.path.join(i.location.path, i.filename)
                            old_file_fh = utils.open_file(old_file)
                            actual_md5 = apt_pkg.md5sum(old_file_fh)
                            old_file_fh.close()
                            actual_size = os.stat(old_file)[stat.ST_SIZE]
                            if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
                                x = i

                    old_file = os.path.join(x.location.path, x.filename)
                    old_file_fh = utils.open_file(old_file)
                    actual_md5 = apt_pkg.md5sum(old_file_fh)
                    old_file_fh.close()
                    actual_size = os.stat(old_file)[stat.ST_SIZE]
                    found = old_file
                    suite_type = x.location.archive_type
                    # need this for updating dsc_files in install()
                    dsc_entry["files id"] = x.file_id
                    # See install() in process-accepted...
                    if not orig_files.has_key(dsc_name):
                        orig_files[dsc_name] = {}
                    orig_files[dsc_name]["id"] = x.file_id
                    orig_files[dsc_name]["path"] = old_file
                    orig_files[dsc_name]["location"] = x.location.location_id
                else:
                    # TODO: Record the queues and info in the DB so we don't hardcode all this crap
                    # Not there? Check the queue directories...
                    for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
                        if not Cnf.has_key("Dir::Queue::%s" % (directory)):
                            continue
                        in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
                        if os.path.exists(in_otherdir):
                            in_otherdir_fh = utils.open_file(in_otherdir)
                            actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
                            in_otherdir_fh.close()
                            actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
                            found = in_otherdir
                            if not orig_files.has_key(dsc_name):
                                orig_files[dsc_name] = {}
                            orig_files[dsc_name]["path"] = in_otherdir

                    if not found:
                        self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
                        continue
            else:
                self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
                continue

            if actual_md5 != dsc_entry["md5sum"]:
                self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
            if actual_size != int(dsc_entry["size"]):
                self.rejects.append("size for %s doesn't match %s." % (found, file))
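
    # On success, self.pkg.orig_files is keyed by tarball name, e.g. with
    # hypothetical values:
    #   orig_files["foo_1.2.orig.tar.gz"] = {
    #       "path": "/srv/ftp/pool/main/f/foo/foo_1.2.orig.tar.gz",
    #       "id": 12345,       # pool files id, only when found in the pool
    #       "location": 42,    # location id, likewise
    #   }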

    ################################################################################

    def accepted_checks(self, overwrite_checks, session):
        # Recheck anything that relies on the database; since that's not
        # frozen between accept and our run time when called from p-a.

        # overwrite_checks is set to False when installing to stable/oldstable

        propagate = {}
        nopropagate = {}

        # Find the .dsc (again)
        dsc_filename = None
        for f in self.pkg.files.keys():
            if self.pkg.files[f]["type"] == "dsc":
                dsc_filename = f

        for checkfile in self.pkg.files.keys():
            # The .orig.tar.gz can disappear out from under us if it's a
            # duplicate of one in the archive.
            if not self.pkg.files.has_key(checkfile):
                continue

            entry = self.pkg.files[checkfile]

            # Check that the source still exists
            if entry["type"] == "deb":
                source_version = entry["source version"]
                source_package = entry["source package"]
                if not self.pkg.changes["architecture"].has_key("source") \
                   and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
                    self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))

            # Version and file overwrite checks
            if overwrite_checks:
                if entry["type"] == "deb":
                    self.check_binary_against_db(checkfile, session)
                elif entry["type"] == "dsc":
                    self.check_source_against_db(checkfile, session)
                    self.check_dsc_against_db(dsc_filename, session)

            # propagate in the case it is in the override tables:
            for suite in self.pkg.changes.get("propdistribution", {}).keys():
                if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    propagate[suite] = 1
                else:
                    nopropagate[suite] = 1

        for suite in propagate.keys():
            if suite in nopropagate:
                continue
            self.pkg.changes["distribution"][suite] = 1

        for checkfile in self.pkg.files.keys():
            # Check the package is still in the override tables
            entry = self.pkg.files[checkfile]
            for suite in self.pkg.changes["distribution"].keys():
                if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
                    self.rejects.append("%s is NEW for %s." % (checkfile, suite))
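
    # Note the two-pass shape: the per-file loop only records votes in
    # propagate/nopropagate, and a suite is added to the upload's
    # distribution list afterwards only if no file voted against it.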

    ################################################################################
    # This is not really a reject, but an unaccept, but since a) the code for
    # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
    # extremely rare, for now we'll go with whining at our admin folks...

    def do_unaccept(self):
        cnf = Config()

        self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
        if cnf.has_key("Dinstall::Bcc"):
            self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])

        template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")

        reject_mail_message = utils.TemplateSubst(self.Subst, template)

        # Write the rejection email out as the <foo>.reason file
        reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
        reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)

        # If we fail here someone is probably trying to exploit the race
        # so let's just raise an exception ...
        if os.path.exists(reject_filename):
            os.unlink(reject_filename)

        fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
        os.write(fd, reject_mail_message)
        os.close(fd)

        utils.send_mail(reject_mail_message)

        del self.Subst["__REJECTOR_ADDRESS__"]
        del self.Subst["__REJECT_MESSAGE__"]
        del self.Subst["__CC__"]

    ################################################################################
    # If any file of an upload has a recent mtime then chances are good
    # the file is still being uploaded.

    def upload_too_new(self):
        cnf = Config()
        too_new = False

        # Move back to the original directory to get accurate time stamps
        cwd = os.getcwd()
        os.chdir(self.pkg.directory)
        file_list = self.pkg.files.keys()
        file_list.extend(self.pkg.dsc_files.keys())
        file_list.append(self.pkg.changes_file)
        for f in file_list:
            try:
                last_modified = time.time()-os.path.getmtime(f)
                if last_modified < int(cnf["Dinstall::SkipTime"]):