5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
58 ###############################################################################
60 def get_type(f, session):
62 Get the file type of C{f}
65 @param f: file entry from Changes object
67 @type session: SQLA Session
68 @param session: SQL Alchemy session object
75 if f.has_key("dbtype"):
76 file_type = f["dbtype"]
77 elif re_source_ext.match(f["type"]):
80 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
82 # Validate the override type
83 type_id = get_override_type(file_type, session)
85 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
89 ################################################################################
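# Illustrative inputs for get_type() above (a sketch, not real upload data):
# binary entries carry a "dbtype" of "deb" or "udeb" set by the binary checks,
# while source files are recognised via re_source_ext on the "type" field and
# end up as "dsc". Anything else makes utils.fubar() abort.
_EXAMPLE_GET_TYPE_BINARY_ENTRY = {"dbtype": "deb", "type": "deb"}    # -> "deb"
_EXAMPLE_GET_TYPE_SOURCE_ENTRY = {"type": "dsc"}                     # -> "dsc"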
91 # Determine what parts in a .changes are NEW
93 def determine_new(changes, files, warn=1):
95 Determine what parts in a C{changes} file are NEW.
97 @type changes: Upload.Pkg.changes dict
98 @param changes: Changes dictionary
100 @type files: Upload.Pkg.files dict
101 @param files: Files dictionary
104 @param warn: Warn if overrides are added for (old)stable
107 @return: dictionary of NEW components.
112 session = DBConn().session()
114 # Build up a list of potentially new things
115 for name, f in files.items():
116 # Skip byhand elements
117 if f["type"] == "byhand":
120 priority = f["priority"]
121 section = f["section"]
122 file_type = get_type(f, session)
123 component = f["component"]
125 if file_type == "dsc":
128 if not new.has_key(pkg):
130 new[pkg]["priority"] = priority
131 new[pkg]["section"] = section
132 new[pkg]["type"] = file_type
133 new[pkg]["component"] = component
134 new[pkg]["files"] = []
136 old_type = new[pkg]["type"]
137 if old_type != file_type:
138 # source gets trumped by deb or udeb
139 if old_type == "dsc":
140 new[pkg]["priority"] = priority
141 new[pkg]["section"] = section
142 new[pkg]["type"] = file_type
143 new[pkg]["component"] = component
145 new[pkg]["files"].append(name)
147 if f.has_key("othercomponents"):
148 new[pkg]["othercomponents"] = f["othercomponents"]
150 for suite in changes["suite"].keys():
151 for pkg in new.keys():
152 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
154 for file_entry in new[pkg]["files"]:
155 if files[file_entry].has_key("new"):
156 del files[file_entry]["new"]
160 for s in ['stable', 'oldstable']:
161 if changes["suite"].has_key(s):
162 print "WARNING: overrides will be added for %s!" % s
163 for pkg in new.keys():
164 if new[pkg].has_key("othercomponents"):
165 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
171 ################################################################################
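# A minimal sketch (hypothetical values) of the dictionary returned by
# determine_new() above: one entry per potentially NEW package, carrying the
# override data the NEW queue needs. check_valid() below later adds the
# "section id" and "priority id" keys looked up from the database.
_EXAMPLE_NEW_DICT = {
    "fancypkg": {
        "priority": "optional",
        "section": "utils",
        "type": "deb",
        "component": "main",
        "files": ["fancypkg_1.0-1_amd64.deb"],
        # "othercomponents" is only present when the package already exists
        # in another component of one of the target suites.
    },
}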
173 def check_valid(new):
175 Check if the section and priority for NEW packages exist in the database.
176 Additionally performs sanity checks:
177 - debian-installer packages have to be udeb (or source)
178 - non-debian-installer packages cannot be udeb
179 - source priority can only be assigned to dsc file types
182 @param new: Dict of new packages with their section, priority and type.
185 for pkg in new.keys():
186 section_name = new[pkg]["section"]
187 priority_name = new[pkg]["priority"]
188 file_type = new[pkg]["type"]
190 section = get_section(section_name)
192 new[pkg]["section id"] = -1
194 new[pkg]["section id"] = section.section_id
196 priority = get_priority(priority_name)
198 new[pkg]["priority id"] = -1
200 new[pkg]["priority id"] = priority.priority_id
203 di = section_name.find("debian-installer") != -1
205 # If d-i, we must be udeb and vice-versa
206 if (di and file_type not in ("udeb", "dsc")) or \
207 (not di and file_type == "udeb"):
208 new[pkg]["section id"] = -1
210 # If dsc we need to be source and vice-versa
211 if (priority_name == "source" and file_type != "dsc") or \
212 (priority_name != "source" and file_type == "dsc"):
213 new[pkg]["priority id"] = -1
215 ###############################################################################
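# The sanity rules applied by check_valid() above, restated as a standalone
# sketch (not called by dak): a debian-installer section requires a udeb or
# dsc, a non-d-i section must not be a udeb, and the "source" priority is
# reserved for dsc entries.
def _section_priority_ok(section_name, priority_name, file_type):
    di = section_name.find("debian-installer") != -1
    section_ok = not ((di and file_type not in ("udeb", "dsc")) or
                      (not di and file_type == "udeb"))
    priority_ok = not ((priority_name == "source" and file_type != "dsc") or
                       (priority_name != "source" and file_type == "dsc"))
    return section_ok and priority_ok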
217 def check_status(files):
219 for f in files.keys():
220 if files[f]["type"] == "byhand":
222 elif files[f].has_key("new"):
226 ###############################################################################
228 # Used by Upload.check_timestamps
229 class TarTime(object):
230 def __init__(self, future_cutoff, past_cutoff):
232 self.future_cutoff = future_cutoff
233 self.past_cutoff = past_cutoff
236 self.future_files = {}
237 self.ancient_files = {}
239 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
240 if MTime > self.future_cutoff:
241 self.future_files[Name] = MTime
242 if MTime < self.past_cutoff:
243 self.ancient_files[Name] = MTime
245 ###############################################################################
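# Usage sketch for TarTime (an illustration, not called by dak itself): mirrors
# the cutoff computation used by Upload.check_timestamps() further below. The
# real check also falls back to data.tar.bz2 when data.tar.gz is absent.
def _example_tartime_scan(deb_path, cnf):
    future_cutoff = time.time() + int(cnf["Dinstall::FutureTimeTravelGrace"])
    past_cutoff = time.mktime(time.strptime(cnf["Dinstall::PastCutoffYear"], "%Y"))
    tar = TarTime(future_cutoff, past_cutoff)
    deb_file = utils.open_file(deb_path)
    apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
    deb_file.seek(0)
    apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
    # Both dicts map member names to the offending mtimes.
    return tar.future_files, tar.ancient_files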
247 class Upload(object):
249 Everything that has to do with processing an upload.
257 ###########################################################################
260 """ Reset a number of internal variables."""
262 # Initialize the substitution template map
265 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
266 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
267 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
268 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
276 def package_info(self):
278 Format various messages from this Upload to send to the maintainer.
282 ('Reject Reasons', self.rejects),
283 ('Warnings', self.warnings),
284 ('Notes', self.notes),
288 for title, messages in msgs:
290 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
295 ###########################################################################
296 def update_subst(self):
297 """ Set up the per-package template substitution mappings """
301 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
302 if not self.pkg.changes.has_key("architecture") or not \
303 isinstance(self.pkg.changes["architecture"], dict):
304 self.pkg.changes["architecture"] = { "Unknown" : "" }
306 # and maintainer2047 may not exist.
307 if not self.pkg.changes.has_key("maintainer2047"):
308 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
310 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
311 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
312 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
314 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
315 if self.pkg.changes["architecture"].has_key("source") and \
316 self.pkg.changes["changedby822"] != "" and \
317 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
319 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
320 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
321 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
323 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
324 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
325 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
327 if "sponsoremail" in self.pkg.changes:
328 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
330 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
331 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
333 # Apply any global override of the Maintainer field
334 if cnf.get("Dinstall::OverrideMaintainer"):
335 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
336 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
338 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
339 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
340 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
342 ###########################################################################
343 def load_changes(self, filename):
346 @return: whether the changes file was valid or not. We may want to
347 reject even if this is True (see what gets put in self.rejects).
348 This is simply to prevent us even trying things later which will
349 fail because we couldn't properly parse the file.
352 self.pkg.changes_file = filename
354 # Parse the .changes field into a dictionary
356 self.pkg.changes.update(parse_changes(filename))
357 except CantOpenError:
358 self.rejects.append("%s: can't read file." % (filename))
360 except ParseChangesError, line:
361 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
363 except ChangesUnicodeError:
364 self.rejects.append("%s: changes file not proper utf-8" % (filename))
367 # Parse the Files field from the .changes into another dictionary
369 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
370 except ParseChangesError, line:
371 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
373 except UnknownFormatError, format:
374 self.rejects.append("%s: unknown format '%s'." % (filename, format))
377 # Check for mandatory fields
378 for i in ("distribution", "source", "binary", "architecture",
379 "version", "maintainer", "files", "changes", "description"):
380 if not self.pkg.changes.has_key(i):
381 # Avoid undefined errors later
382 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
385 # Strip a source version in brackets from the source field
386 if re_strip_srcver.search(self.pkg.changes["source"]):
387 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
389 # Ensure the source field is a valid package name.
390 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
391 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
393 # Split multi-value fields into a lower-level dictionary
394 for i in ("architecture", "distribution", "binary", "closes"):
395 o = self.pkg.changes.get(i, "")
397 del self.pkg.changes[i]
399 self.pkg.changes[i] = {}
402 self.pkg.changes[i][j] = 1
404 # Fix the Maintainer: field to be RFC822/2047 compatible
406 (self.pkg.changes["maintainer822"],
407 self.pkg.changes["maintainer2047"],
408 self.pkg.changes["maintainername"],
409 self.pkg.changes["maintaineremail"]) = \
410 fix_maintainer (self.pkg.changes["maintainer"])
411 except ParseMaintError, msg:
412 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
413 % (filename, self.pkg.changes["maintainer"], msg))
415 # ...likewise for the Changed-By: field if it exists.
417 (self.pkg.changes["changedby822"],
418 self.pkg.changes["changedby2047"],
419 self.pkg.changes["changedbyname"],
420 self.pkg.changes["changedbyemail"]) = \
421 fix_maintainer (self.pkg.changes.get("changed-by", ""))
422 except ParseMaintError, msg:
423 self.pkg.changes["changedby822"] = ""
424 self.pkg.changes["changedby2047"] = ""
425 self.pkg.changes["changedbyname"] = ""
426 self.pkg.changes["changedbyemail"] = ""
428 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
429 % (filename, self.pkg.changes["changed-by"], msg))
431 # Ensure all the values in Closes: are numbers
432 if self.pkg.changes.has_key("closes"):
433 for i in self.pkg.changes["closes"].keys():
434 if re_isanum.match(i) is None:
435 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
437 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
438 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
439 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
441 # Check the .changes is non-empty
442 if not self.pkg.files:
443 self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
446 # Changes was syntactically valid even if we'll reject
449 ###########################################################################
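# Sketch of the field normalisation done by load_changes() above (hypothetical
# values): multi-value fields are split into dictionaries keyed by each value,
# which is what later tests such as has_key("source") rely on, while
# fix_maintainer() yields the 822/2047 forms plus name and email parts.
_EXAMPLE_SPLIT_CHANGES_FIELDS = {
    "architecture": {"source": 1, "amd64": 1},
    "distribution": {"unstable": 1},
    "binary": {"fancypkg": 1},
    "closes": {"123456": 1},
}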
451 def check_distributions(self):
452 "Check and map the Distribution field"
456 # Handle suite mappings
457 for m in Cnf.ValueList("SuiteMappings"):
460 if mtype == "map" or mtype == "silent-map":
461 (source, dest) = args[1:3]
462 if self.pkg.changes["distribution"].has_key(source):
463 del self.pkg.changes["distribution"][source]
464 self.pkg.changes["distribution"][dest] = 1
465 if mtype != "silent-map":
466 self.notes.append("Mapping %s to %s." % (source, dest))
467 if self.pkg.changes.has_key("distribution-version"):
468 if self.pkg.changes["distribution-version"].has_key(source):
469 self.pkg.changes["distribution-version"][source]=dest
470 elif mtype == "map-unreleased":
471 (source, dest) = args[1:3]
472 if self.pkg.changes["distribution"].has_key(source):
473 for arch in self.pkg.changes["architecture"].keys():
474 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
475 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
476 del self.pkg.changes["distribution"][source]
477 self.pkg.changes["distribution"][dest] = 1
479 elif mtype == "ignore":
481 if self.pkg.changes["distribution"].has_key(suite):
482 del self.pkg.changes["distribution"][suite]
483 self.warnings.append("Ignoring %s as a target suite." % (suite))
484 elif mtype == "reject":
486 if self.pkg.changes["distribution"].has_key(suite):
487 self.rejects.append("Uploads to %s are not accepted." % (suite))
488 elif mtype == "propup-version":
489 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
491 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
492 if self.pkg.changes["distribution"].has_key(args[1]):
493 self.pkg.changes.setdefault("distribution-version", {})
494 for suite in args[2:]:
495 self.pkg.changes["distribution-version"][suite] = suite
497 # Ensure there is (still) a target distribution
498 if len(self.pkg.changes["distribution"].keys()) < 1:
499 self.rejects.append("No valid distribution remaining.")
501 # Ensure target distributions exist
502 for suite in self.pkg.changes["distribution"].keys():
503 if not Cnf.has_key("Suite::%s" % (suite)):
504 self.rejects.append("Unknown distribution `%s'." % (suite))
506 ###########################################################################
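# Illustrative SuiteMappings entries as parsed by check_distributions() above.
# The values are examples only; the live dak configuration may differ. Each
# entry is a whitespace-separated string whose first word selects the mapping
# type:
_EXAMPLE_SUITE_MAPPINGS = [
    "map stable proposed-updates",              # rewrite one target suite to another
    "silent-map oldstable oldstable-proposed-updates",   # same, without adding a note
    "map-unreleased stable unstable",           # map only for architectures missing in the source suite
    "ignore UNRELEASED",                        # drop the suite from the upload with a warning
    "reject experimental-snapshot",             # refuse uploads targeting the suite
    "propup-version testing-security testing",  # record suites for version propagation
]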
508 def binary_file_checks(self, f, session):
510 entry = self.pkg.files[f]
512 # Extract package control information
513 deb_file = utils.open_file(f)
515 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
517 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
519 # Can't continue, none of the checks on control would work.
522 # Check for mandatory "Description:"
525 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
527 self.rejects.append("%s: Missing Description in binary package" % (f))
532 # Check for mandatory fields
533 for field in [ "Package", "Architecture", "Version" ]:
534 if control.Find(field) is None:
536 self.rejects.append("%s: No %s field in control." % (f, field))
539 # Ensure the package name matches the one given in the .changes
540 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
541 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
543 # Validate the package field
544 package = control.Find("Package")
545 if not re_valid_pkg_name.match(package):
546 self.rejects.append("%s: invalid package name '%s'." % (f, package))
548 # Validate the version field
549 version = control.Find("Version")
550 if not re_valid_version.match(version):
551 self.rejects.append("%s: invalid version number '%s'." % (f, version))
553 # Ensure the architecture of the .deb is one we know about.
554 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
555 architecture = control.Find("Architecture")
556 upload_suite = self.pkg.changes["distribution"].keys()[0]
558 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
559 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
560 self.rejects.append("Unknown architecture '%s'." % (architecture))
562 # Ensure the architecture of the .deb is one of the ones
563 # listed in the .changes.
564 if not self.pkg.changes["architecture"].has_key(architecture):
565 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
567 # Sanity-check the Depends field
568 depends = control.Find("Depends")
570 self.rejects.append("%s: Depends field is empty." % (f))
572 # Sanity-check the Provides field
573 provides = control.Find("Provides")
575 provide = re_spacestrip.sub('', provides)
577 self.rejects.append("%s: Provides field is empty." % (f))
578 prov_list = provide.split(",")
579 for prov in prov_list:
580 if not re_valid_pkg_name.match(prov):
581 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
583 # Check the section & priority match those given in the .changes (non-fatal)
584 if control.Find("Section") and entry["section"] != "" \
585 and entry["section"] != control.Find("Section"):
586 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
587 (f, control.Find("Section", ""), entry["section"]))
588 if control.Find("Priority") and entry["priority"] != "" \
589 and entry["priority"] != control.Find("Priority"):
590 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
591 (f, control.Find("Priority", ""), entry["priority"]))
593 entry["package"] = package
594 entry["architecture"] = architecture
595 entry["version"] = version
596 entry["maintainer"] = control.Find("Maintainer", "")
598 if f.endswith(".udeb"):
599 self.pkg.files[f]["dbtype"] = "udeb"
600 elif f.endswith(".deb"):
601 self.pkg.files[f]["dbtype"] = "deb"
603 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
605 entry["source"] = control.Find("Source", entry["package"])
607 # Get the source version
608 source = entry["source"]
611 if source.find("(") != -1:
612 m = re_extract_src_version.match(source)
614 source_version = m.group(2)
616 if not source_version:
617 source_version = self.pkg.files[f]["version"]
619 entry["source package"] = source
620 entry["source version"] = source_version
622 # Ensure the filename matches the contents of the .deb
623 m = re_isadeb.match(f)
626 file_package = m.group(1)
627 if entry["package"] != file_package:
628 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
629 (f, file_package, entry["dbtype"], entry["package"]))
630 epochless_version = re_no_epoch.sub('', control.Find("Version"))
633 file_version = m.group(2)
634 if epochless_version != file_version:
635 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
636 (f, file_version, entry["dbtype"], epochless_version))
639 file_architecture = m.group(3)
640 if entry["architecture"] != file_architecture:
641 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
642 (f, file_architecture, entry["dbtype"], entry["architecture"]))
644 # Check that the corresponding source exists
645 source_version = entry["source version"]
646 source_package = entry["source package"]
647 if self.pkg.changes["architecture"].has_key("source"):
648 if source_version != self.pkg.changes["version"]:
649 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
650 (source_version, f, self.pkg.changes["version"]))
652 # Check in the SQL database
653 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
654 # Check in one of the other directories
655 source_epochless_version = re_no_epoch.sub('', source_version)
656 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
657 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
659 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
662 dsc_file_exists = False
663 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
664 if cnf.has_key("Dir::Queue::%s" % (myq)):
665 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
666 dsc_file_exists = True
669 if not dsc_file_exists:
670 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
672 # Check the version and for file overwrites
673 self.check_binary_against_db(f, session)
675 # Temporarily disable contents generation until we change the table storage layout
678 #if len(b.rejects) > 0:
679 # for j in b.rejects:
680 # self.rejects.append(j)
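# Filename consistency sketch for binary_file_checks() above (hypothetical
# package): re_isadeb splits the filename into package, version and
# architecture parts, each of which must match the control file; the control
# Version is compared after stripping any epoch with re_no_epoch.
_EXAMPLE_DEB_FILENAME = "fancypkg_1.2-3_amd64.deb"
_EXAMPLE_DEB_FILENAME_PARTS = ("fancypkg", "1.2-3", "amd64")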
682 def source_file_checks(self, f, session):
683 entry = self.pkg.files[f]
685 m = re_issource.match(f)
689 entry["package"] = m.group(1)
690 entry["version"] = m.group(2)
691 entry["type"] = m.group(3)
693 # Ensure the source package name matches the Source field in the .changes
694 if self.pkg.changes["source"] != entry["package"]:
695 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
697 # Ensure the source version matches the version in the .changes file
698 if re_is_orig_source.match(f):
699 changes_version = self.pkg.changes["chopversion2"]
701 changes_version = self.pkg.changes["chopversion"]
703 if changes_version != entry["version"]:
704 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
706 # Ensure the .changes lists source in the Architecture field
707 if not self.pkg.changes["architecture"].has_key("source"):
708 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
710 # Check the signature of a .dsc file
711 if entry["type"] == "dsc":
712 # check_signature returns either:
713 # (None, [list, of, rejects]) or (signature, [])
714 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
716 self.rejects.append(j)
718 entry["architecture"] = "source"
720 def per_suite_file_checks(self, f, suite, session):
722 entry = self.pkg.files[f]
725 if entry.has_key("byhand"):
728 # Check we have fields we need to do these checks
730 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
731 if not entry.has_key(m):
732 self.rejects.append("file '%s' does not have field %s set" % (f, m))
738 # Handle component mappings
739 for m in cnf.ValueList("ComponentMappings"):
740 (source, dest) = m.split()
741 if entry["component"] == source:
742 entry["original component"] = source
743 entry["component"] = dest
745 # Ensure the component is valid for the target suite
746 if cnf.has_key("Suite:%s::Components" % (suite)) and \
747 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
748 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
751 # Validate the component
752 if not get_component(entry["component"], session):
753 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
756 # See if the package is NEW
757 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
760 # Validate the priority
761 if entry["priority"].find('/') != -1:
762 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
764 # Determine the location
765 location = cnf["Dir::Pool"]
766 l = get_location(location, entry["component"], session=session)
768 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %)" % entry["component"])
769 entry["location id"] = -1
771 entry["location id"] = l.location_id
773 # Check the md5sum & size against existing files (if any)
774 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
776 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
777 entry["size"], entry["md5sum"], entry["location id"])
780 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
781 elif found is False and poolfile is not None:
782 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
785 entry["files id"] = None
787 entry["files id"] = poolfile.file_id
789 # Check for packages that have moved from one component to another
790 entry['suite'] = suite
791 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
793 entry["othercomponents"] = res.fetchone()[0]
795 def check_files(self, action=True):
796 file_keys = self.pkg.files.keys()
802 os.chdir(self.pkg.directory)
804 ret = holding.copy_to_holding(f)
806 # XXX: Should we bail out here or try and continue?
807 self.rejects.append(ret)
811 # Check whether we already know the changes file
812 # [NB: this check must be done post-suite mapping]
813 base_filename = os.path.basename(self.pkg.changes_file)
815 session = DBConn().session()
818 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
819 # if in the pool or in a queue other than unchecked, reject
820 if (dbc.in_queue is None) \
821 or (dbc.in_queue is not None
822 and dbc.in_queue.queue_name != 'unchecked'):
823 self.rejects.append("%s file already known to dak" % base_filename)
824 except NoResultFound, e:
831 for f, entry in self.pkg.files.items():
832 # Ensure the file does not already exist in one of the accepted directories
833 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
834 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
835 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
836 self.rejects.append("%s file already exists in the %s directory." % (f, d))
838 if not re_taint_free.match(f):
839 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
841 # Check the file is readable
842 if os.access(f, os.R_OK) == 0:
843 # When running in -n, copy_to_holding() won't have
844 # generated the reject_message, so we need to.
846 if os.path.exists(f):
847 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
849 self.rejects.append("Can't read `%s'. [file not found]" % (f))
850 entry["type"] = "unreadable"
853 # If it's byhand skip remaining checks
854 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
856 entry["type"] = "byhand"
858 # Checks for a binary package...
859 elif re_isadeb.match(f):
861 entry["type"] = "deb"
863 # This routine appends to self.rejects/warnings as appropriate
864 self.binary_file_checks(f, session)
866 # Checks for a source package...
867 elif re_issource.match(f):
870 # This routine appends to self.rejects/warnings as appropriate
871 self.source_file_checks(f, session)
873 # Not a binary or source package? Assume byhand...
876 entry["type"] = "byhand"
878 # Per-suite file checks
879 entry["oldfiles"] = {}
880 for suite in self.pkg.changes["distribution"].keys():
881 self.per_suite_file_checks(f, suite, session)
885 # If the .changes file says it has source, it must have source.
886 if self.pkg.changes["architecture"].has_key("source"):
888 self.rejects.append("no source found and Architecture line in changes mention source.")
890 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
891 self.rejects.append("source only uploads are not supported.")
893 ###########################################################################
894 def check_dsc(self, action=True, session=None):
895 """Returns bool indicating whether or not the source changes are valid"""
896 # Ensure there is source to check
897 if not self.pkg.changes["architecture"].has_key("source"):
902 for f, entry in self.pkg.files.items():
903 if entry["type"] == "dsc":
905 self.rejects.append("can not process a .changes file with multiple .dsc's.")
910 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
912 self.rejects.append("source uploads must contain a dsc file")
915 # Parse the .dsc file
917 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
918 except CantOpenError:
919 # if not -n copy_to_holding() will have done this for us...
921 self.rejects.append("%s: can't read file." % (dsc_filename))
922 except ParseChangesError, line:
923 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
924 except InvalidDscError, line:
925 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
926 except ChangesUnicodeError:
927 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
929 # Build up the file list of files mentioned by the .dsc
931 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
932 except NoFilesFieldError:
933 self.rejects.append("%s: no Files: field." % (dsc_filename))
935 except UnknownFormatError, format:
936 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
938 except ParseChangesError, line:
939 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
942 # Enforce mandatory fields
943 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
944 if not self.pkg.dsc.has_key(i):
945 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
948 # Validate the source and version fields
949 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
950 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
951 if not re_valid_version.match(self.pkg.dsc["version"]):
952 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
954 # Only a limited list of source formats is allowed in each suite
955 for dist in self.pkg.changes["distribution"].keys():
956 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
957 if self.pkg.dsc["format"] not in allowed:
958 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
960 # Validate the Maintainer field
962 # We ignore the return value
963 fix_maintainer(self.pkg.dsc["maintainer"])
964 except ParseMaintError, msg:
965 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
966 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
968 # Validate the build-depends field(s)
969 for field_name in [ "build-depends", "build-depends-indep" ]:
970 field = self.pkg.dsc.get(field_name)
972 # Have apt try to parse them...
974 apt_pkg.ParseSrcDepends(field)
976 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
978 # Ensure the version number in the .dsc matches the version number in the .changes
979 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
980 changes_version = self.pkg.files[dsc_filename]["version"]
982 if epochless_dsc_version != changes_version:
983 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
986 # Ensure the Files field contains only what's expected
986 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
988 # Ensure source is newer than existing source in target suites
989 session = DBConn().session()
990 self.check_source_against_db(dsc_filename, session)
991 self.check_dsc_against_db(dsc_filename, session)
996 ###########################################################################
998 def get_changelog_versions(self, source_dir):
999 """Extracts a the source package and (optionally) grabs the
1000 version history out of debian/changelog for the BTS."""
1004 # Find the .dsc (again)
1006 for f in self.pkg.files.keys():
1007 if self.pkg.files[f]["type"] == "dsc":
1010 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1011 if not dsc_filename:
1014 # Create a symlink mirror of the source files in our temporary directory
1015 for f in self.pkg.files.keys():
1016 m = re_issource.match(f)
1018 src = os.path.join(source_dir, f)
1019 # If a file is missing for whatever reason, give up.
1020 if not os.path.exists(src):
1023 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1024 self.pkg.orig_files[f].has_key("path"):
1026 dest = os.path.join(os.getcwd(), f)
1027 os.symlink(src, dest)
1029 # If the orig files are not a part of the upload, create symlinks to the existing copies.
1031 for orig_file in self.pkg.orig_files.keys():
1032 if not self.pkg.orig_files[orig_file].has_key("path"):
1034 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1035 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1037 # Extract the source
1038 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1039 (result, output) = commands.getstatusoutput(cmd)
1041 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1042 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1045 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1048 # Get the upstream version
1049 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1050 if re_strip_revision.search(upstr_version):
1051 upstr_version = re_strip_revision.sub('', upstr_version)
1053 # Ensure the changelog file exists
1054 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1055 if not os.path.exists(changelog_filename):
1056 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1059 # Parse the changelog
1060 self.pkg.dsc["bts changelog"] = ""
1061 changelog_file = utils.open_file(changelog_filename)
1062 for line in changelog_file.readlines():
1063 m = re_changelog_versions.match(line)
1065 self.pkg.dsc["bts changelog"] += line
1066 changelog_file.close()
1068 # Check we found at least one revision in the changelog
1069 if not self.pkg.dsc["bts changelog"]:
1070 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1072 def check_source(self):
1074 # a) there's no source
1075 # or b) the orig files are MIA
1076 if not self.pkg.changes["architecture"].has_key("source") \
1077 or len(self.pkg.orig_files) == 0:
1080 tmpdir = utils.temp_dirname()
1082 # Move into the temporary directory
1086 # Get the changelog version history
1087 self.get_changelog_versions(cwd)
1089 # Move back and cleanup the temporary tree
1093 shutil.rmtree(tmpdir)
1095 if e.errno != errno.EACCES:
1097 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1099 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1100 # We probably have u-r or u-w directories so chmod everything
1102 cmd = "chmod -R u+rwx %s" % (tmpdir)
1103 result = os.system(cmd)
1105 utils.fubar("'%s' failed with result %s." % (cmd, result))
1106 shutil.rmtree(tmpdir)
1107 except Exception, e:
1108 print "foobar2 (%s)" % e
1109 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1111 ###########################################################################
1112 def ensure_hashes(self):
1113 # Make sure we recognise the format of the Files: field in the .changes
1114 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1115 if len(format) == 2:
1116 format = int(format[0]), int(format[1])
1118 format = int(float(format[0])), 0
1120 # We need to deal with the original changes blob, as the fields we need
1121 # might not be in the changes dict serialised into the .dak anymore.
1122 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1124 # Copy the checksums over to the current changes dict. This will keep
1125 # the existing modifications to it intact.
1126 for field in orig_changes:
1127 if field.startswith('checksums-'):
1128 self.pkg.changes[field] = orig_changes[field]
1130 # Check for unsupported hashes
1131 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1132 self.rejects.append(j)
1134 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1135 self.rejects.append(j)
1137 # We have to calculate the hash ourselves if the changes format version predates the
1138 # one in which that hash first appears, rather than requiring it to exist in the changes file
1139 for hashname, hashfunc, version in utils.known_hashes:
1140 # TODO: Move _ensure_changes_hash into this class
1141 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1142 self.rejects.append(j)
1143 if "source" in self.pkg.changes["architecture"]:
1144 # TODO: Move _ensure_dsc_hash into this class
1145 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1146 self.rejects.append(j)
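# Sketch of the Format handling in ensure_hashes() above: the value from the
# .changes is turned into a (major, minor) tuple so that hash checks can
# depend on the format version (example values only):
_EXAMPLE_CHANGES_FORMAT_TUPLES = {
    "1.8": (1, 8),
    "1": (1, 0),    # single-component values go through the float() fallback
}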
1148 def check_hashes(self):
1149 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1150 self.rejects.append(m)
1152 for m in utils.check_size(".changes", self.pkg.files):
1153 self.rejects.append(m)
1155 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1156 self.rejects.append(m)
1158 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1159 self.rejects.append(m)
1161 self.ensure_hashes()
1163 ###########################################################################
1165 def ensure_orig(self, target_dir='.', session=None):
1167 Ensures that all orig files mentioned in the changes file are present
1168 in target_dir. If they do not exist, they are symlinked into place.
1170 A list containing the symlinks that were created is returned (so they
1177 for filename, entry in self.pkg.dsc_files.iteritems():
1178 if not re_is_orig_source.match(filename):
1179 # File is not an orig; ignore
1182 if os.path.exists(filename):
1183 # File exists, no need to continue
1186 def symlink_if_valid(path):
1187 f = utils.open_file(path)
1188 md5sum = apt_pkg.md5sum(f)
1191 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1192 expected = (int(entry['size']), entry['md5sum'])
1194 if fingerprint != expected:
1197 dest = os.path.join(target_dir, filename)
1199 os.symlink(path, dest)
1200 symlinked.append(dest)
1206 session_ = DBConn().session()
1211 for poolfile in get_poolfile_like_name('/%s' % filename, session_):
1212 poolfile_path = os.path.join(
1213 poolfile.location.path, poolfile.filename
1216 if symlink_if_valid(poolfile_path):
1226 # Look in some other queues for the file
1227 queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
1228 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1230 for queue in queues:
1231 if not cnf.get('Dir::Queue::%s' % queue):
1234 queuefile_path = os.path.join(
1235 cnf['Dir::Queue::%s' % queue], filename
1238 if not os.path.exists(queuefile_path):
1239 # Does not exist in this queue
1242 if symlink_if_valid(queuefile_path):
1247 ###########################################################################
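# Usage sketch for ensure_orig() above (mirrors what check_lintian() does just
# below; "upload" stands for an Upload instance and is hypothetical here):
#
#   symlinked = upload.ensure_orig()      # pull missing orig tarballs into '.'
#   ... run tools that need the complete source ...
#   for link in symlinked:
#       os.unlink(link)                   # remove the temporary symlinks again
#
# Candidates are only accepted when their size and md5sum match the .dsc entry,
# and are looked for in the pool and the various queue directories.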
1249 def check_lintian(self):
1252 # Don't reject binary uploads
1253 if not self.pkg.changes['architecture'].has_key('source'):
1256 # Only check some distributions
1258 for dist in ('unstable', 'experimental'):
1259 if dist in self.pkg.changes['distribution']:
1266 tagfile = cnf.get("Dinstall::LintianTags")
1268 # We don't have a tagfile, so just don't do anything.
1271 # Parse the yaml file
1272 sourcefile = file(tagfile, 'r')
1273 sourcecontent = sourcefile.read()
1276 lintiantags = yaml.load(sourcecontent)['lintian']
1277 except yaml.YAMLError, msg:
1278 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1281 # Try and find all orig mentioned in the .dsc
1282 symlinked = self.ensure_orig()
1284 # Now set up the input file for lintian. lintian wants "one tag per line" only,
1285 # so put it together like that. We put all types of tags in one file and then sort
1286 # through lintian's output later to see whether a fatal tag was detected or not.
1287 # This way we only run lintian once for all tags, even if we might reject on some but not on others.
1289 # Additionally build up a set of tags
1291 (fd, temp_filename) = utils.temp_filename()
1292 temptagfile = os.fdopen(fd, 'w')
1293 for tagtype in lintiantags:
1294 for tag in lintiantags[tagtype]:
1295 temptagfile.write("%s\n" % tag)
1299 # Now run lintian on the .changes file, capturing its output
1301 command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
1302 (result, output) = commands.getstatusoutput(command)
1304 # We are done with lintian, remove our tempfile and any symlinks we created
1305 os.unlink(temp_filename)
1306 for symlink in symlinked:
1310 utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
1311 utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
1313 if len(output) == 0:
1318 self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
1320 # We have lintian output, so this package isn't clean. Let's parse it and see
1321 # whether anything warrants a reject.
1322 # W: tzdata: binary-without-manpage usr/sbin/tzconfig
1323 for line in output.split('\n'):
1324 m = re_parse_lintian.match(line)
1329 epackage = m.group(2)
1333 # So let's check whether we know the tag at all.
1334 if etag not in tags:
1338 # We know it and it is overridden. Check whether the override is allowed.
1339 if etag in lintiantags['warning']:
1340 # The tag is overridden, and it is allowed to be overridden.
1341 # Don't add a reject message.
1343 elif etag in lintiantags['error']:
1344 # The tag is overridden, but is not allowed to be overridden.
1345 self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
1346 log("ftpmaster does not allow tag to be overridable", etag)
1348 # Tag is known, it is not overridden; reject directly.
1349 self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
1350 # Now tell them whether they *might* override it.
1351 if etag in lintiantags['warning']:
1352 log("auto rejecting", "overridable", etag)
1353 self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
1355 log("auto rejecting", "not overridable", etag)
1357 ###########################################################################
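# Illustrative lintian output line for the parsing in check_lintian() above
# (package, tag and path are examples; this is the same sample quoted in the
# comment there). re_parse_lintian picks out the severity, package and tag, and
# only tags listed in the configured tag file can trigger an automatic reject.
_EXAMPLE_LINTIAN_OUTPUT_LINE = "W: tzdata: binary-without-manpage usr/sbin/tzconfig"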
1358 def check_urgency(self):
1360 if self.pkg.changes["architecture"].has_key("source"):
1361 if not self.pkg.changes.has_key("urgency"):
1362 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1363 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1364 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1365 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1366 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1367 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1369 ###########################################################################
1371 # Sanity check the time stamps of files inside debs.
1372 # [Files in the near future cause ugly warnings and extreme time
1373 # travel can cause errors on extraction]
1375 def check_timestamps(self):
1378 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1379 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1380 tar = TarTime(future_cutoff, past_cutoff)
1382 for filename, entry in self.pkg.files.items():
1383 if entry["type"] == "deb":
1386 deb_file = utils.open_file(filename)
1387 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1390 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1391 except SystemError, e:
1392 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1393 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1396 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1400 future_files = tar.future_files.keys()
1402 num_future_files = len(future_files)
1403 future_file = future_files[0]
1404 future_date = tar.future_files[future_file]
1405 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1406 % (filename, num_future_files, future_file, time.ctime(future_date)))
1408 ancient_files = tar.ancient_files.keys()
1410 num_ancient_files = len(ancient_files)
1411 ancient_file = ancient_files[0]
1412 ancient_date = tar.ancient_files[ancient_file]
1413 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1414 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1416 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1418 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1419 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1421 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1427 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1428 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1429 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1430 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1431 self.pkg.changes["sponsoremail"] = uid_email
1436 ###########################################################################
1437 # check_signed_by_key checks
1438 ###########################################################################
1440 def check_signed_by_key(self):
1441 """Ensure the .changes is signed by an authorized uploader."""
1442 session = DBConn().session()
1444 # First of all we check that the person has proper upload permissions
1445 # and that this upload isn't blocked
1446 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1449 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1452 # TODO: Check that import-keyring adds UIDs properly
1454 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1457 # Check that the fingerprint which uploaded has permission to do so
1458 self.check_upload_permissions(fpr, session)
1460 # Check that this package is not in a transition
1461 self.check_transition(session)
1466 def check_upload_permissions(self, fpr, session):
1467 # Check any one-off upload blocks
1468 self.check_upload_blocks(fpr, session)
1470 # Start with DM as a special case
1471 # DM is a special case unfortunately, so we check it first
1472 # (keys with no source access get more access than DMs in one
1473 # way; DMs can only upload for their packages whether source
1474 # or binary, whereas keys with no access might be able to
1475 # upload some binaries)
1476 if fpr.source_acl.access_level == 'dm':
1477 self.check_dm_upload(fpr, session)
1479 # Check source-based permissions for other types
1480 if self.pkg.changes["architecture"].has_key("source"):
1481 if fpr.source_acl.access_level is None:
1482 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1483 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1484 self.rejects.append(rej)
1487 # If not a DM, we allow full upload rights
1488 uid_email = "%s@debian.org" % (fpr.uid.uid)
1489 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1492 # Check binary upload permissions
1493 # By this point we know that DMs can't have got here unless they
1494 # are allowed to deal with the package concerned so just apply
1496 if fpr.binary_acl.access_level == 'full':
1499 # Otherwise we're in the map case
1500 tmparches = self.pkg.changes["architecture"].copy()
1501 tmparches.pop('source', None)
1503 for bam in fpr.binary_acl_map:
1504 tmparches.pop(bam.architecture.arch_string, None)
1506 if len(tmparches.keys()) > 0:
1507 if fpr.binary_reject:
1508 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1509 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1510 self.rejects.append(rej)
1512 # TODO: This is where we'll implement reject vs throw away binaries later
1513 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1514 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1515 rej += "\nFingerprint: %s", (fpr.fingerprint)
1516 self.rejects.append(rej)
1519 def check_upload_blocks(self, fpr, session):
1520 """Check whether any upload blocks apply to this source, source
1521 version, uid / fpr combination"""
1523 def block_rej_template(fb):
1524 rej = 'Manual upload block in place for package %s' % fb.source
1525 if fb.version is not None:
1526 rej += ', version %s' % fb.version
1529 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1530 # version is None if the block applies to all versions
1531 if fb.version is None or fb.version == self.pkg.changes['version']:
1532 # Check both fpr and uid - either is enough to cause a reject
1533 if fb.fpr is not None:
1534 if fb.fpr.fingerprint == fpr.fingerprint:
1535 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1536 if fb.uid is not None:
1537 if fb.uid == fpr.uid:
1538 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1541 def check_dm_upload(self, fpr, session):
1542 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1543 ## none of the uploaded packages are NEW
1545 for f in self.pkg.files.keys():
1546 if self.pkg.files[f].has_key("byhand"):
1547 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1549 if self.pkg.files[f].has_key("new"):
1550 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1556 ## the most recent version of the package uploaded to unstable or
1557 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1558 ## section of its control file
1559 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1560 q = q.join(SrcAssociation)
1561 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1562 q = q.order_by(desc('source.version')).limit(1)
1567 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1568 self.rejects.append(rej)
1572 if not r.dm_upload_allowed:
1573 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1574 self.rejects.append(rej)
1577 ## the Maintainer: field of the uploaded .changes file corresponds with
1578 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1580 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1581 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1583 ## the most recent version of the package uploaded to unstable or
1584 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1585 ## non-developer maintainers cannot NMU or hijack packages)
1587 # srcuploaders includes the maintainer
1589 for sup in r.srcuploaders:
1590 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1591 # Eww - I hope we never have two people with the same name in Debian
1592 if email == fpr.uid.uid or name == fpr.uid.name:
1597 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1600 ## none of the packages are being taken over from other source packages
1601 for b in self.pkg.changes["binary"].keys():
1602 for suite in self.pkg.changes["distribution"].keys():
1603 q = session.query(DBSource)
1604 q = q.join(DBBinary).filter_by(package=b)
1605 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1608 if s.source != self.pkg.changes["source"]:
1609 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1613 def check_transition(self, session):
1616 sourcepkg = self.pkg.changes["source"]
1618 # No sourceful upload -> no need to do anything else, direct return
1619 # We also work with unstable uploads, not experimental or those going to some
1620 # proposed-updates queue
1621 if "source" not in self.pkg.changes["architecture"] or \
1622 "unstable" not in self.pkg.changes["distribution"]:
1625 # Also, only check if a transitions file is defined (and exists)
1627 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1628 if transpath == "" or not os.path.exists(transpath):
1631 # Parse the yaml file
1632 sourcefile = file(transpath, 'r')
1633 sourcecontent = sourcefile.read()
1635 transitions = yaml.load(sourcecontent)
1636 except yaml.YAMLError, msg:
1637 # This shouldn't happen, as there is a wrapper to edit the file which
1638 # checks it, but we would rather be safe than end up rejecting everything.
1640 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1643 # Now look through all defined transitions
1644 for trans in transitions:
1645 t = transitions[trans]
1646 source = t["source"]
1649 # Will be None if nothing is in testing.
1650 current = get_source_in_suite(source, "testing", session)
1651 if current is not None:
1652 compare = apt_pkg.VersionCompare(current.version, expected)
1654 if current is None or compare < 0:
1655 # This is still valid, the current version in testing is older than
1656 # the new version we wait for, or there is none in testing yet
1658 # Check if the source we look at is affected by this.
1659 if sourcepkg in t['packages']:
1660 # The source is affected, let's reject it.
1662 rejectmsg = "%s: part of the %s transition.\n\n" % (
1665 if current is not None:
1666 currentlymsg = "at version %s" % (current.version)
1668 currentlymsg = "not present in testing"
1670 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1672 rejectmsg += "\n".join(textwrap.wrap("""Your package
1673 is part of a testing transition designed to get %s migrated (it is
1674 currently %s, we need version %s). This transition is managed by the
1675 Release Team, and %s is the Release-Team member responsible for it.
1676 Please mail debian-release@lists.debian.org or contact %s directly if you
1677 need further assistance. You might want to upload to experimental until this
1678 transition is done."""
1679 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1681 self.rejects.append(rejectmsg)
1684 ###########################################################################
1685 # End check_signed_by_key checks
1686 ###########################################################################
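# Illustrative transitions file for check_transition() above. This is a sketch:
# the real file is maintained by the release team, and the key holding the
# awaited version is assumed here to be "new".
_EXAMPLE_TRANSITIONS_YAML = """
fancylib-abi:
    reason: "fancylib changed its ABI"
    source: fancylib
    new: 1.2-3
    rm: "Some Release Team Member"
    packages:
        - fancyapp
        - otherapp
"""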
1688 def build_summaries(self):
1689 """ Build a summary of changes the upload introduces. """
1691 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1693 short_summary = summary
1695 # This is for direport's benefit...
1696 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1699 summary += "Changes: " + f
1701 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1703 summary += self.announce(short_summary, 0)
1705 return (summary, short_summary)
1707 ###########################################################################
1709 def close_bugs(self, summary, action):
1711 Send mail to close bugs as instructed by the closes field in the changes file.
1712 Also add a line to summary if any work was done.
1714 @type summary: string
1715 @param summary: summary text, as given by L{build_summaries}
1718 @param action: If set to false, no real action will be taken.
1721 @return: summary. If action was taken, extended by the list of closed bugs.
1725 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1727 bugs = self.pkg.changes["closes"].keys()
1733 summary += "Closing bugs: "
1735 summary += "%s " % (bug)
1738 self.Subst["__BUG_NUMBER__"] = bug
1739 if self.pkg.changes["distribution"].has_key("stable"):
1740 self.Subst["__STABLE_WARNING__"] = """
1741 Note that this package is not part of the released stable Debian
1742 distribution. It may have dependencies on other unreleased software,
1743 or other instabilities. Please take care if you wish to install it.
1744 The update will eventually make its way into the next released Debian
1747 self.Subst["__STABLE_WARNING__"] = ""
1748 mail_message = utils.TemplateSubst(self.Subst, template)
1749 utils.send_mail(mail_message)
1751 # Clear up after ourselves
1752 del self.Subst["__BUG_NUMBER__"]
1753 del self.Subst["__STABLE_WARNING__"]
1755 if action and self.logger:
1756 self.logger.log(["closing bugs"] + bugs)
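# A minimal sketch of the substitution step used above, assuming plain
# string replacement (utils.TemplateSubst is the real implementation):
#
#   def template_subst_sketch(subst_map, template_text):
#       # Replace every "__KEY__" placeholder with its configured value.
#       for key, value in subst_map.items():
#           template_text = template_text.replace(key, str(value))
#       return template_text
#
# e.g. "__BUG_NUMBER__" in the bug-close template becomes the concrete bug
# number before utils.send_mail() is called.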
1762 ###########################################################################
1764 def announce(self, short_summary, action):
1766 Send an announce mail about a new upload.
1768 @type short_summary: string
1769 @param short_summary: Short summary text to include in the mail
1772 @param action: If set to false, no real action will be taken.
1775 @return: Text string describing the action taken.
1780 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1782 # Only do announcements for source uploads with a recent dpkg-dev installed
1783 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1784 self.pkg.changes["architecture"].has_key("source"):
1790 self.Subst["__SHORT_SUMMARY__"] = short_summary
1792 for dist in self.pkg.changes["distribution"].keys():
1793 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1794 if announce_list == "" or lists_done.has_key(announce_list):
1797 lists_done[announce_list] = 1
1798 summary += "Announcing to %s\n" % (announce_list)
1802 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1803 if cnf.get("Dinstall::TrackingServer") and \
1804 self.pkg.changes["architecture"].has_key("source"):
1805 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1806 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1808 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1809 utils.send_mail(mail_message)
1811 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1813 if cnf.FindB("Dinstall::CloseBugs"):
1814 summary = self.close_bugs(summary, action)
1816 del self.Subst["__SHORT_SUMMARY__"]
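# Illustrative configuration sketch (apt.conf style; the key names match
# the lookups above, the values are made-up examples):
#
#   Suite::unstable::Announce "debian-devel-changes@lists.debian.org";
#   Dinstall::TrackingServer "packages.qa.debian.org";
#   Dinstall::CloseBugs "true";
#
# With settings like these, a sourceful unstable upload is announced to the
# list, Bcc'ed to <source>@<TrackingServer>, and close_bugs() is invoked.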
1820 ###########################################################################
1822 def accept (self, summary, short_summary, session=None):
1826 This moves all files referenced from the .changes into the pool,
1827 sends the accepted mail, announces to lists, closes bugs and
1828 also checks for override disparities. If enabled it will write out
1829 the version history for the BTS Version Tracking and will finally call
1832 @type summary: string
1833 @param summary: Summary text
1835 @type short_summary: string
1836 @param short_summary: Short summary
1840 stats = SummaryStats()
1843 self.logger.log(["installing changes", self.pkg.changes_file])
1845 # Add the .dsc file to the DB first
1846 for newfile, entry in self.pkg.files.items():
1847 if entry["type"] == "dsc":
1848 dsc_component, dsc_location_id = add_dsc_to_db(self, newfile, session)
1850 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1851 for newfile, entry in self.pkg.files.items():
1852 if entry["type"] == "deb":
1853 add_deb_to_db(self, newfile, session)
1855 # If this is a sourceful diff only upload that is moving
1856 # cross-component we need to copy the .orig files into the new
1857 # component too for the same reasons as above.
1858 if self.pkg.changes["architecture"].has_key("source"):
1859 for orig_file in self.pkg.orig_files.keys():
1860 if not self.pkg.orig_files[orig_file].has_key("id"):
1861 continue # Skip if it's not in the pool
1862 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1863 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1864 continue # Skip if the location didn't change
1867 oldf = get_poolfile_by_id(orig_file_id, session)
1868 old_filename = os.path.join(oldf.location.path, oldf.filename)
1869 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1870 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1872 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
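# Hypothetical example of the path built here: for source "hello" in
# component "main", utils.poolify() yields roughly "main/h/hello/", so
# new_filename ends up as something like "main/h/hello/hello_1.0.orig.tar.gz",
# relative to Dir::Pool.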
1874 # TODO: Care about size/md5sum collisions etc
1875 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1878 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1879 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1881 # TODO: Check that there's only 1 here
1882 source = get_sources_from_name(self.pkg.changes["source"], self.pkg.changes["version"])[0]
1883 dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
1884 dscf.poolfile_id = newf.file_id
1888 # Install the files into the pool
1889 for newfile, entry in self.pkg.files.items():
1890 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1891 utils.move(newfile, destination)
1892 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1893 stats.accept_bytes += float(entry["size"])
1895 # Copy the .changes file across for suites which need it.
1897 for suite_name in self.pkg.changes["distribution"].keys():
1898 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1899 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1901 for dest in copy_changes.keys():
1902 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
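# Illustrative configuration sketch (key name from the lookup above, the
# value is a made-up example):
#
#   Suite::experimental::CopyChanges "dists/experimental/";
#
# The .changes file would then also be copied to <Dir::Root>/dists/experimental/.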
1904 # We're done - commit the database changes
1906 # Our SQL session will automatically start a new transaction after the last commit.
1909 # Move the .changes into the 'done' directory
1910 utils.move(self.pkg.changes_file,
1911 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1913 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1914 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1916 # Send accept mail, announce to lists, close bugs and check for
1917 # override disparities
1918 if not cnf["Dinstall::Options::No-Mail"]:
1920 self.Subst["__SUITE__"] = ""
1921 self.Subst["__SUMMARY__"] = summary
1922 mail_message = utils.TemplateSubst(self.Subst,
1923 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1924 utils.send_mail(mail_message)
1925 self.announce(short_summary, 1)
1927 ## Helper stuff for DebBugs Version Tracking
1928 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1929 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1930 # the conditionalization on dsc["bts changelog"] should be removed.
1933 # Write out the version history from the changelog
1934 if self.pkg.changes["architecture"].has_key("source") and \
1935 self.pkg.dsc.has_key("bts changelog"):
1937 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1938 version_history = os.fdopen(fd, 'w')
1939 version_history.write(self.pkg.dsc["bts changelog"])
1940 version_history.close()
1941 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1942 self.pkg.changes_file[:-8]+".versions")
1943 os.rename(temp_filename, filename)
1944 os.chmod(filename, 0644)
1946 # Write out the binary -> source mapping.
1947 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1948 debinfo = os.fdopen(fd, 'w')
1949 for name, entry in sorted(self.pkg.files.items()):
1950 if entry["type"] == "deb":
1951 line = " ".join([entry["package"], entry["version"],
1952 entry["architecture"], entry["source package"],
1953 entry["source version"]])
1954 debinfo.write(line+"\n")
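# A resulting .debinfo line therefore reads (hypothetical values):
#
#   hello 2.10-1 amd64 hello 2.10-1
#
# i.e. "package version architecture source-package source-version".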
1956 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1957 self.pkg.changes_file[:-8]+".debinfo")
1958 os.rename(temp_filename, filename)
1959 os.chmod(filename, 0644)
1961 # This routine returns None on success or an error on failure
1962 # TODO: Replace queue copying using the new queue.add_file_from_pool routine
1963 # and by looking up which queues in suite.copy_queues
1964 #res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
1971 stats.accept_count += 1
1973 def check_override(self):
1975 Checks override entries for validity. Mails "Override disparity" warnings,
1976 if that feature is enabled.
1978 Abandons the check if
1979 - override disparity checks are disabled
1980 - mail sending is disabled
1985 # Abandon the check if:
1986 # a) override disparity checks have been disabled
1987 # b) we're not sending mail
1988 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1989 cnf["Dinstall::Options::No-Mail"]:
1992 summary = self.pkg.check_override()
1997 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2000 self.Subst["__SUMMARY__"] = summary
2001 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2002 utils.send_mail(mail_message)
2003 del self.Subst["__SUMMARY__"]
2005 ###########################################################################
2007 def remove(self, from_dir=None):
2009 Used (for instance) in p-u to remove the package from unchecked
2011 Also removes the package from the holding area.
2013 if from_dir is None:
2014 from_dir = self.pkg.directory
2017 for f in self.pkg.files.keys():
2018 os.unlink(os.path.join(from_dir, f))
2019 if os.path.exists(os.path.join(h.holding_dir, f)):
2020 os.unlink(os.path.join(h.holding_dir, f))
2022 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2023 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2024 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2026 ###########################################################################
2028 def move_to_dir (self, dest, perms=0660, changesperms=0664):
2030 Move files to dest with certain perms/changesperms
2033 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2034 dest, perms=changesperms)
2035 for f in self.pkg.files.keys():
2036 utils.move(os.path.join(h.holding_dir, f), dest, perms=perms)
2038 ###########################################################################
2040 def force_reject(self, reject_files):
2042 Forcefully move files from the current directory to the
2043 reject directory. If any file already exists in the reject
2044 directory it will be moved to the morgue to make way for
2048 @param reject_files: list of file names to move to the reject directory
2054 for file_entry in reject_files:
2055 # Skip any files which don't exist or which we don't have permission to copy.
2056 if os.access(file_entry, os.R_OK) == 0:
2059 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2062 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2064 # File exists? Let's try and move it to the morgue
2065 if e.errno == errno.EEXIST:
2066 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2068 morgue_file = utils.find_next_free(morgue_file)
2069 except NoFreeFilenameError:
2070 # Something's either gone badly Pete Tong, or
2071 # someone is trying to exploit us.
2072 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2074 utils.move(dest_file, morgue_file, perms=0660)
2076 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2079 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2083 # If we got here, we own the destination file, so we can
2084 # safely overwrite it.
2085 utils.move(file_entry, dest_file, 1, perms=0660)
2088 ###########################################################################
2089 def do_reject (self, manual=0, reject_message="", note=""):
2091 Reject an upload. If called without a reject message or C{manual} is
2092 true, spawn an editor so the user can write one.
2095 @param manual: manual or automated rejection
2097 @type reject_message: string
2098 @param reject_message: A reject message
2103 # If we weren't given a manual rejection message, spawn an
2104 # editor so the user can add one in...
2105 if manual and not reject_message:
2106 (fd, temp_filename) = utils.temp_filename()
2107 temp_file = os.fdopen(fd, 'w')
2110 temp_file.write(line)
2112 editor = os.environ.get("EDITOR","vi")
2114 while answer == 'E':
2115 os.system("%s %s" % (editor, temp_filename))
2116 temp_fh = utils.open_file(temp_filename)
2117 reject_message = "".join(temp_fh.readlines())
2119 print "Reject message:"
2120 print utils.prefix_multi_line_string(reject_message, " ", include_blank_lines=1)
2121 prompt = "[R]eject, Edit, Abandon, Quit ?"
2123 while prompt.find(answer) == -1:
2124 answer = utils.our_raw_input(prompt)
2125 m = re_default_answer.search(prompt)
2128 answer = answer[:1].upper()
2129 os.unlink(temp_filename)
2135 print "Rejecting.\n"
2139 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2140 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2142 # Move all the files into the reject directory
2143 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2144 self.force_reject(reject_files)
2146 # If we fail here someone is probably trying to exploit the race
2147 # so let's just raise an exception ...
2148 if os.path.exists(reason_filename):
2149 os.unlink(reason_filename)
2150 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2152 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2156 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2157 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2158 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2159 os.write(reason_fd, reject_message)
2160 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2162 # Build up the rejection email
2163 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2164 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2165 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2166 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2167 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2168 # Write the rejection email out as the <foo>.reason file
2169 os.write(reason_fd, reject_mail_message)
2171 del self.Subst["__REJECTOR_ADDRESS__"]
2172 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2173 del self.Subst["__CC__"]
2177 # Send the rejection mail if appropriate
2178 if not cnf["Dinstall::Options::No-Mail"]:
2179 utils.send_mail(reject_mail_message)
2182 self.logger.log(["rejected", self.pkg.changes_file])
2186 ################################################################################
2187 def in_override_p(self, package, component, suite, binary_type, filename, session):
2189 Check if a package already has override entries in the DB
2191 @type package: string
2192 @param package: package name
2194 @type component: string
2195 @param component: component name
2198 @param suite: suite name
2200 @type binary_type: string
2201 @param binary_type: type of the package
2203 @type filename: string
2204 @param filename: filename we check
2206 @return: the database result. But no one cares anyway.
2212 if binary_type == "": # must be source
2215 file_type = binary_type
2217 # Override suite name; used for example with proposed-updates
2218 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2219 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
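# Illustrative configuration sketch (key name from the lookup above, the
# mapping itself is an assumed example):
#
#   Suite::proposed-updates::OverrideSuite "stable";
#
# i.e. override entries for proposed-updates uploads are looked up in stable.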
2221 result = get_override(package, suite, component, file_type, session)
2223 # If checking for a source package fall back on the binary override type
2224 if file_type == "dsc" and len(result) < 1:
2225 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2227 # Remember the section and priority so we can check them later if appropriate
2230 self.pkg.files[filename]["override section"] = result.section.section
2231 self.pkg.files[filename]["override priority"] = result.priority.priority
2236 ################################################################################
2237 def get_anyversion(self, sv_list, suite):
2240 @param sv_list: list of (suite, version) tuples to check
2243 @param suite: suite name
2249 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
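# Illustrative configuration sketch (key name from the ValueList lookup
# above, suite names are examples):
#
#   Suite::testing-proposed-updates::VersionChecks::Enhances { "testing"; };
#
# get_anyversion() would then consider versions from both t-p-u and testing
# and keep the highest one found in sv_list.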
2250 for (s, v) in sv_list:
2251 if s in [ x.lower() for x in anysuite ]:
2252 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2257 ################################################################################
2259 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2262 @param sv_list: list of (suite, version) tuples to check
2264 @type filename: string
2265 @param filename: name of the file being checked (used in reject messages)
2267 @type new_version: string
2268 @param new_version: version of the package/file being uploaded
2270 Ensure versions are newer than existing packages in target
2271 suites and that cross-suite version checking rules as
2272 set out in the conf file are satisfied.
2277 # Check versions for each target suite
2278 for target_suite in self.pkg.changes["distribution"].keys():
2279 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2280 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2282 # Enforce "must be newer than target suite" even if conffile omits it
2283 if target_suite not in must_be_newer_than:
2284 must_be_newer_than.append(target_suite)
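# Illustrative configuration sketch (apt.conf style; the key names match
# the ValueList lookups above, the suite names are examples):
#
#   Suite::unstable::VersionChecks
#   {
#     MustBeNewerThan { "stable"; "testing"; };
#     MustBeOlderThan { "experimental"; };
#   };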
2286 for (suite, existent_version) in sv_list:
2287 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2289 if suite in must_be_newer_than and sourceful and vercmp < 1:
2290 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2292 if suite in must_be_older_than and vercmp > -1:
2295 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2296 # we really use the other suite, ignoring the conflicting one ...
2297 addsuite = self.pkg.changes["distribution-version"][suite]
2299 add_version = self.get_anyversion(sv_list, addsuite)
2300 target_version = self.get_anyversion(sv_list, target_suite)
2303 # not add_version can only happen if we map to a suite
2304 # that doesn't enhance the suite we're propup'ing from.
2305 # so "propup-ver x a b c; map a d" is a problem only if
2306 # d doesn't enhance a.
2308 # i think we could always propagate in this case, rather
2309 # than complaining. either way, this isn't a REJECT issue
2311 # And - we really should complain to the dorks who configured dak
2312 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2313 self.pkg.changes.setdefault("propdistribution", {})
2314 self.pkg.changes["propdistribution"][addsuite] = 1
2316 elif not target_version:
2317 # not target_version is true when the package is NEW
2318 # we could just stick with the "...old version..." REJECT
2319 # for this, I think.
2320 self.rejects.append("Won't propagate NEW packages.")
2321 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2322 # propagation would be redundant. no need to reject though.
2323 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2325 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2326 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2328 self.warnings.append("Propagating upload to %s" % (addsuite))
2329 self.pkg.changes.setdefault("propdistribution", {})
2330 self.pkg.changes["propdistribution"][addsuite] = 1
2334 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2336 ################################################################################
2337 def check_binary_against_db(self, filename, session):
2338 # Ensure version is sane
2339 q = session.query(BinAssociation)
2340 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2341 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2343 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2344 filename, self.pkg.files[filename]["version"], sourceful=False)
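# sv_list passed above is a list of (suite_name, version) pairs, e.g.
# (hypothetical) [("unstable", "1.0-2"), ("testing", "1.0-1")].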
2346 # Check for any existing copies of the file
2347 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2348 q = q.filter_by(version=self.pkg.files[filename]["version"])
2349 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2352 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2354 ################################################################################
2356 def check_source_against_db(self, filename, session):
2359 source = self.pkg.dsc.get("source")
2360 version = self.pkg.dsc.get("version")
2362 # Ensure version is sane
2363 q = session.query(SrcAssociation)
2364 q = q.join(DBSource).filter(DBSource.source==source)
2366 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2367 filename, version, sourceful=True)
2369 ################################################################################
2370 def check_dsc_against_db(self, filename, session):
2373 @warning: NB: this function can remove entries from the 'files' index [if
2374 the orig tarball is a duplicate of the one in the archive]; if
2375 you're iterating over 'files' and call this function as part of
2376 the loop, be sure to add a check to the top of the loop to
2377 ensure you haven't just tried to dereference the deleted entry.
2382 self.pkg.orig_files = {} # XXX: do we need to clear it?
2383 orig_files = self.pkg.orig_files
2385 # Try and find all files mentioned in the .dsc. This has
2386 # to work harder to cope with the multiple possible
2387 # locations of an .orig.tar.gz.
2388 # The ordering on the select is needed to pick the newest orig
2389 # when it exists in multiple places.
2390 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2392 if self.pkg.files.has_key(dsc_name):
2393 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2394 actual_size = int(self.pkg.files[dsc_name]["size"])
2395 found = "%s in incoming" % (dsc_name)
2397 # Check the file does not already exist in the archive
2398 ql = get_poolfile_like_name(dsc_name, session)
2400 # Strip out anything that isn't '%s' or '/%s$'
2402 if not i.filename.endswith(dsc_name):
2405 # "[dak] has not broken them. [dak] has fixed a
2406 # brokenness. Your crappy hack exploited a bug in
2409 # "(Come on! I thought it was always obvious that
2410 # one just doesn't release different files with
2411 # the same name and version.)"
2412 # -- ajk@ on d-devel@l.d.o
2415 # Ignore exact matches for .orig.tar.gz
2417 if re_is_orig_source.match(dsc_name):
2419 if self.pkg.files.has_key(dsc_name) and \
2420 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2421 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2422 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2423 # TODO: Don't delete the entry, just mark it as not needed
2424 # This would fix the stupidity of changing something we often iterate over
2425 # whilst we're doing it
2426 del self.pkg.files[dsc_name]
2427 dsc_entry["files id"] = i.file_id
2428 if not orig_files.has_key(dsc_name):
2429 orig_files[dsc_name] = {}
2430 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2434 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2436 elif re_is_orig_source.match(dsc_name):
2438 ql = get_poolfile_like_name(dsc_name, session)
2440 # Strip out anything that isn't '%s' or '/%s$'
2441 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2443 if not i.filename.endswith(dsc_name):
2447 # Unfortunately, we may get more than one match here if,
2448 # for example, the package was in potato but had an -sa
2449 # upload in woody. So we need to choose the right one.
2451 # default to something sane in case we don't match any or have only one
2456 old_file = os.path.join(i.location.path, i.filename)
2457 old_file_fh = utils.open_file(old_file)
2458 actual_md5 = apt_pkg.md5sum(old_file_fh)
2460 actual_size = os.stat(old_file)[stat.ST_SIZE]
2461 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2464 old_file = os.path.join(i.location.path, i.filename)
2465 old_file_fh = utils.open_file(old_file)
2466 actual_md5 = apt_pkg.md5sum(old_file_fh)
2468 actual_size = os.stat(old_file)[stat.ST_SIZE]
2470 suite_type = x.location.archive_type
2471 # need this for updating dsc_files in install()
2472 dsc_entry["files id"] = x.file_id
2473 # See install() in process-accepted...
2474 if not orig_files.has_key(dsc_name):
2475 orig_files[dsc_name] = {}
2476 orig_files[dsc_name]["id"] = x.file_id
2477 orig_files[dsc_name]["path"] = old_file
2478 orig_files[dsc_name]["location"] = x.location.location_id
2480 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2481 # Not there? Check the queue directories...
2482 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2483 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2485 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2486 if os.path.exists(in_otherdir):
2487 in_otherdir_fh = utils.open_file(in_otherdir)
2488 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2489 in_otherdir_fh.close()
2490 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2492 if not orig_files.has_key(dsc_name):
2493 orig_files[dsc_name] = {}
2494 orig_files[dsc_name]["path"] = in_otherdir
2497 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2500 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2502 if actual_md5 != dsc_entry["md5sum"]:
2503 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2504 if actual_size != int(dsc_entry["size"]):
2505 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2507 ################################################################################
2508 # This is used by process-new and process-holding to recheck a changes file
2509 # at the time we're running. It mainly wraps various other internal functions
2510 # and is similar to accepted_checks - these should probably be tidied up
2512 def recheck(self, session):
2514 for f in self.pkg.files.keys():
2515 # The .orig.tar.gz can disappear out from under us if it's a
2516 # duplicate of one in the archive.
2517 if not self.pkg.files.has_key(f):
2520 entry = self.pkg.files[f]
2522 # Check that the source still exists
2523 if entry["type"] == "deb":
2524 source_version = entry["source version"]
2525 source_package = entry["source package"]
2526 if not self.pkg.changes["architecture"].has_key("source") \
2527 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2528 source_epochless_version = re_no_epoch.sub('', source_version)
2529 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2531 for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
2532 if cnf.has_key("Dir::Queue::%s" % (q)):
2533 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2536 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2538 # Version and file overwrite checks
2539 if entry["type"] == "deb":
2540 self.check_binary_against_db(f, session)
2541 elif entry["type"] == "dsc":
2542 self.check_source_against_db(f, session)
2543 self.check_dsc_against_db(f, session)
2545 ################################################################################
2546 def accepted_checks(self, overwrite_checks, session):
2547 # Recheck anything that relies on the database, since that's not
2548 # frozen between accept and our run time when called from p-a.
2550 # overwrite_checks is set to False when installing to stable/oldstable
2555 # Find the .dsc (again)
2557 for f in self.pkg.files.keys():
2558 if self.pkg.files[f]["type"] == "dsc":
2561 for checkfile in self.pkg.files.keys():
2562 # The .orig.tar.gz can disappear out from under us if it's a
2563 # duplicate of one in the archive.
2564 if not self.pkg.files.has_key(checkfile):
2567 entry = self.pkg.files[checkfile]
2569 # Check that the source still exists
2570 if entry["type"] == "deb":
2571 source_version = entry["source version"]
2572 source_package = entry["source package"]
2573 if not self.pkg.changes["architecture"].has_key("source") \
2574 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2575 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2577 # Version and file overwrite checks
2578 if overwrite_checks:
2579 if entry["type"] == "deb":
2580 self.check_binary_against_db(checkfile, session)
2581 elif entry["type"] == "dsc":
2582 self.check_source_against_db(checkfile, session)
2583 self.check_dsc_against_db(dsc_filename, session)
2585 # propagate in the case it is in the override tables:
2586 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2587 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2588 propogate[suite] = 1
2590 nopropogate[suite] = 1
2592 for suite in propogate.keys():
2593 if suite in nopropogate:
2595 self.pkg.changes["distribution"][suite] = 1
2597 for checkfile in self.pkg.files.keys():
2598 # Check the package is still in the override tables
2599 for suite in self.pkg.changes["distribution"].keys():
2600 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2601 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2603 ################################################################################
2604 # This is not really a reject, but an unaccept, but since a) the code for
2605 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2606 # extremely rare, for now we'll go with whining at our admin folks...
2608 def do_unaccept(self):
2612 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2613 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2614 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2615 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2616 if cnf.has_key("Dinstall::Bcc"):
2617 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2619 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2621 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2623 # Write the rejection email out as the <foo>.reason file
2624 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2625 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2627 # If we fail here someone is probably trying to exploit the race
2628 # so let's just raise an exception ...
2629 if os.path.exists(reject_filename):
2630 os.unlink(reject_filename)
2632 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2633 os.write(fd, reject_mail_message)
2636 utils.send_mail(reject_mail_message)
2638 del self.Subst["__REJECTOR_ADDRESS__"]
2639 del self.Subst["__REJECT_MESSAGE__"]
2640 del self.Subst["__CC__"]
2642 ################################################################################
2643 # If any file of an upload has a recent mtime then chances are good
2644 # the file is still being uploaded.
2646 def upload_too_new(self):
2649 # Move back to the original directory to get accurate time stamps
2651 os.chdir(self.pkg.directory)
2652 file_list = self.pkg.files.keys()
2653 file_list.extend(self.pkg.dsc_files.keys())
2654 file_list.append(self.pkg.changes_file)
2657 last_modified = time.time() - os.path.getmtime(f)
2658 if last_modified < int(cnf["Dinstall::SkipTime"]):
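# Illustrative example: with Dinstall::SkipTime set to, say, "300" (an
# assumed value), a file modified 120 seconds ago counts as too new and the
# upload is treated as still in progress for this run.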