5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
58 ###############################################################################
60 def get_type(f, session):
62 Get the file type of C{f}
65 @param f: file entry from Changes object
67 @type session: SQLA Session
68 @param session: SQL Alchemy session object
75 if f.has_key("dbtype"):
76 file_type = f["dbtype"]
77 elif re_source_ext.match(f["type"]):
80 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
82 # Validate the override type
83 type_id = get_override_type(file_type, session)
85 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
89 ################################################################################
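# A minimal usage sketch, added for illustration only (not called anywhere in
# dak). It assumes an open SQLAlchemy session from DBConn() and shows the
# shape of the file entry dict that get_type() expects: binary entries carry
# a "dbtype" key, source entries a "type" value such as "dsc".
def _example_get_type_usage():
    session = DBConn().session()
    entry = {"type": "dsc"}                  # a source entry; .debs have "dbtype" set instead
    file_type = get_type(entry, session)     # expected to return "dsc"
    session.close()
    return file_type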
91 # Determine what parts in a .changes are NEW
93 def determine_new(changes, files, warn=1):
95 Determine what parts in a C{changes} file are NEW.
97 @type changes: Upload.Pkg.changes dict
98 @param changes: Changes dictionary
100 @type files: Upload.Pkg.files dict
101 @param files: Files dictionary
104 @param warn: Warn if overrides are added for (old)stable
107 @return: dictionary of NEW components.
112 session = DBConn().session()
114 # Build up a list of potentially new things
115 for name, f in files.items():
116 # Skip byhand elements
117 if f["type"] == "byhand":
120 priority = f["priority"]
121 section = f["section"]
122 file_type = get_type(f, session)
123 component = f["component"]
125 if file_type == "dsc":
128 if not new.has_key(pkg):
130 new[pkg]["priority"] = priority
131 new[pkg]["section"] = section
132 new[pkg]["type"] = file_type
133 new[pkg]["component"] = component
134 new[pkg]["files"] = []
136 old_type = new[pkg]["type"]
137 if old_type != file_type:
138 # source gets trumped by deb or udeb
139 if old_type == "dsc":
140 new[pkg]["priority"] = priority
141 new[pkg]["section"] = section
142 new[pkg]["type"] = file_type
143 new[pkg]["component"] = component
145 new[pkg]["files"].append(name)
147 if f.has_key("othercomponents"):
148 new[pkg]["othercomponents"] = f["othercomponents"]
150 for suite in changes["suite"].keys():
151 for pkg in new.keys():
152 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
154 for file_entry in new[pkg]["files"]:
155 if files[file_entry].has_key("new"):
156 del files[file_entry]["new"]
160 for s in ['stable', 'oldstable']:
161 if changes["suite"].has_key(s):
162 print "WARNING: overrides will be added for %s!" % s
163 for pkg in new.keys():
164 if new[pkg].has_key("othercomponents"):
165 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
171 ################################################################################
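# Illustrative sketch (added for clarity; dak itself does not define this).
# determine_new() returns a dict keyed by source package name; the caller
# below is hypothetical and only shows how the collected metadata can be
# inspected.
def _example_inspect_new(changes, files):
    new = determine_new(changes, files, warn=0)
    for pkg, info in new.items():
        # each entry carries "priority", "section", "type", "component" and "files"
        print "%s: %s/%s priority %s" % (pkg, info["component"], info["section"], info["priority"])
    return new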
173 def check_valid(new):
175 Check if section and priority for NEW packages exist in the database.
176 Additionally performs sanity checks:
177 - debian-installer packages have to be udeb (or source)
178 - non-debian-installer packages cannot be udeb
179 - source priority can only be assigned to dsc file types
182 @param new: Dict of new packages with their section, priority and type.
185 for pkg in new.keys():
186 section_name = new[pkg]["section"]
187 priority_name = new[pkg]["priority"]
188 file_type = new[pkg]["type"]
190 section = get_section(section_name)
192 new[pkg]["section id"] = -1
194 new[pkg]["section id"] = section.section_id
196 priority = get_priority(priority_name)
198 new[pkg]["priority id"] = -1
200 new[pkg]["priority id"] = priority.priority_id
203 di = section_name.find("debian-installer") != -1
205 # If d-i, we must be udeb and vice-versa
206 if (di and file_type not in ("udeb", "dsc")) or \
207 (not di and file_type == "udeb"):
208 new[pkg]["section id"] = -1
210 # If dsc we need to be source and vice-versa
211 if (priority_name == "source" and file_type != "dsc") or \
212 (priority_name != "source" and file_type == "dsc"):
213 new[pkg]["priority id"] = -1
215 ###############################################################################
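# Explanatory sketch (not part of the original module): the two sanity rules
# applied in check_valid() above can be summarised as a pair of predicates.
# This helper exists purely to document the rules and is not called by dak.
def _example_new_entry_is_sane(section_name, priority_name, file_type):
    di = section_name.find("debian-installer") != -1
    section_ok = not ((di and file_type not in ("udeb", "dsc")) or
                      (not di and file_type == "udeb"))
    priority_ok = not ((priority_name == "source" and file_type != "dsc") or
                       (priority_name != "source" and file_type == "dsc"))
    return section_ok and priority_ok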
217 def check_status(files):
219 for f in files.keys():
220 if files[f]["type"] == "byhand":
222 elif files[f].has_key("new"):
226 ###############################################################################
228 # Used by Upload.check_timestamps
229 class TarTime(object):
230 def __init__(self, future_cutoff, past_cutoff):
232 self.future_cutoff = future_cutoff
233 self.past_cutoff = past_cutoff
236 self.future_files = {}
237 self.ancient_files = {}
239 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
240 if MTime > self.future_cutoff:
241 self.future_files[Name] = MTime
242 if MTime < self.past_cutoff:
243 self.ancient_files[Name] = MTime
245 ###############################################################################
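# Usage sketch for TarTime, added for illustration; Upload.check_timestamps()
# below is the real user.  The cutoff values mirror what check_timestamps
# derives from Dinstall::FutureTimeTravelGrace and Dinstall::PastCutoffYear.
def _example_tartime_usage(deb_file):
    future_cutoff = time.time() + 24 * 60 * 60            # e.g. one day of grace
    past_cutoff = time.mktime(time.strptime("1984", "%Y"))
    tar = TarTime(future_cutoff, past_cutoff)
    apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
    return (tar.future_files, tar.ancient_files)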
247 class Upload(object):
249 Everything that has to do with processing an upload.
257 ###########################################################################
260 """ Reset a number of internal variables."""
262 # Initialize the substitution template map
265 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
266 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
267 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
268 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
276 def package_info(self):
278 Format various messages from this Upload to send to the maintainer.
282 ('Reject Reasons', self.rejects),
283 ('Warnings', self.warnings),
284 ('Notes', self.notes),
288 for title, messages in msgs:
290 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
295 ###########################################################################
296 def update_subst(self):
297 """ Set up the per-package template substitution mappings """
301 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
302 if not self.pkg.changes.has_key("architecture") or not \
303 isinstance(self.pkg.changes["architecture"], dict):
304 self.pkg.changes["architecture"] = { "Unknown" : "" }
306 # and maintainer2047 may not exist.
307 if not self.pkg.changes.has_key("maintainer2047"):
308 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
310 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
311 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
312 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
314 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
315 if self.pkg.changes["architecture"].has_key("source") and \
316 self.pkg.changes["changedby822"] != "" and \
317 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
319 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
320 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
321 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
323 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
324 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
325 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
327 if "sponsoremail" in self.pkg.changes:
328 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
330 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
331 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
333 # Apply any global override of the Maintainer field
334 if cnf.get("Dinstall::OverrideMaintainer"):
335 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
336 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
338 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
339 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
340 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
342 ###########################################################################
343 def load_changes(self, filename):
346 @return: whether the changes file was valid or not. We may want to
347 reject even if this is True (see what gets put in self.rejects).
348 This is simply to prevent us even trying things later which will
349 fail because we couldn't properly parse the file.
352 self.pkg.changes_file = filename
354 # Parse the .changes file into a dictionary
356 self.pkg.changes.update(parse_changes(filename))
357 except CantOpenError:
358 self.rejects.append("%s: can't read file." % (filename))
360 except ParseChangesError, line:
361 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
363 except ChangesUnicodeError:
364 self.rejects.append("%s: changes file not proper utf-8" % (filename))
367 # Parse the Files field from the .changes into another dictionary
369 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
370 except ParseChangesError, line:
371 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
373 except UnknownFormatError, format:
374 self.rejects.append("%s: unknown format '%s'." % (filename, format))
377 # Check for mandatory fields
378 for i in ("distribution", "source", "binary", "architecture",
379 "version", "maintainer", "files", "changes", "description"):
380 if not self.pkg.changes.has_key(i):
381 # Avoid undefined errors later
382 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
385 # Strip a source version in brackets from the source field
386 if re_strip_srcver.search(self.pkg.changes["source"]):
387 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
389 # Ensure the source field is a valid package name.
390 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
391 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
393 # Split multi-value fields into a lower-level dictionary
394 for i in ("architecture", "distribution", "binary", "closes"):
395 o = self.pkg.changes.get(i, "")
397 del self.pkg.changes[i]
399 self.pkg.changes[i] = {}
402 self.pkg.changes[i][j] = 1
404 # Fix the Maintainer: field to be RFC822/2047 compatible
406 (self.pkg.changes["maintainer822"],
407 self.pkg.changes["maintainer2047"],
408 self.pkg.changes["maintainername"],
409 self.pkg.changes["maintaineremail"]) = \
410 fix_maintainer (self.pkg.changes["maintainer"])
411 except ParseMaintError, msg:
412 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
413 % (filename, self.pkg.changes["maintainer"], msg))
415 # ...likewise for the Changed-By: field if it exists.
417 (self.pkg.changes["changedby822"],
418 self.pkg.changes["changedby2047"],
419 self.pkg.changes["changedbyname"],
420 self.pkg.changes["changedbyemail"]) = \
421 fix_maintainer (self.pkg.changes.get("changed-by", ""))
422 except ParseMaintError, msg:
423 self.pkg.changes["changedby822"] = ""
424 self.pkg.changes["changedby2047"] = ""
425 self.pkg.changes["changedbyname"] = ""
426 self.pkg.changes["changedbyemail"] = ""
428 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
429 % (filename, self.pkg.changes.get("changed-by", ""), msg))
431 # Ensure all the values in Closes: are numbers
432 if self.pkg.changes.has_key("closes"):
433 for i in self.pkg.changes["closes"].keys():
434 if re_isanum.match(i) is None:
435 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
437 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
438 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
439 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
441 # Check the .changes is non-empty
442 if not self.pkg.files:
443 self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
446 # Changes was syntactically valid even if we'll reject
449 ###########################################################################
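# Illustrative example (not original code) of the multi-value splitting done
# in load_changes() above: a raw header such as
#
#   Architecture: source amd64 i386
#
# ends up as a per-value dictionary,
#
#   self.pkg.changes["architecture"] == { "source": 1, "amd64": 1, "i386": 1 }
#
# and the same applies to "distribution", "binary" and "closes".  This is why
# check_distributions() below and the later checks use has_key() lookups
# rather than substring matching.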
451 def check_distributions(self):
452 "Check and map the Distribution field"
456 # Handle suite mappings
457 for m in Cnf.ValueList("SuiteMappings"):
460 if mtype == "map" or mtype == "silent-map":
461 (source, dest) = args[1:3]
462 if self.pkg.changes["distribution"].has_key(source):
463 del self.pkg.changes["distribution"][source]
464 self.pkg.changes["distribution"][dest] = 1
465 if mtype != "silent-map":
466 self.notes.append("Mapping %s to %s." % (source, dest))
467 if self.pkg.changes.has_key("distribution-version"):
468 if self.pkg.changes["distribution-version"].has_key(source):
469 self.pkg.changes["distribution-version"][source]=dest
470 elif mtype == "map-unreleased":
471 (source, dest) = args[1:3]
472 if self.pkg.changes["distribution"].has_key(source):
473 for arch in self.pkg.changes["architecture"].keys():
474 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
475 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
476 del self.pkg.changes["distribution"][source]
477 self.pkg.changes["distribution"][dest] = 1
479 elif mtype == "ignore":
481 if self.pkg.changes["distribution"].has_key(suite):
482 del self.pkg.changes["distribution"][suite]
483 self.warnings.append("Ignoring %s as a target suite." % (suite))
484 elif mtype == "reject":
486 if self.pkg.changes["distribution"].has_key(suite):
487 self.rejects.append("Uploads to %s are not accepted." % (suite))
488 elif mtype == "propup-version":
489 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
491 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
492 if self.pkg.changes["distribution"].has_key(args[1]):
493 self.pkg.changes.setdefault("distribution-version", {})
494 for suite in args[2:]:
495 self.pkg.changes["distribution-version"][suite] = suite
497 # Ensure there is (still) a target distribution
498 if len(self.pkg.changes["distribution"].keys()) < 1:
499 self.rejects.append("No valid distribution remaining.")
501 # Ensure target distributions exist
502 for suite in self.pkg.changes["distribution"].keys():
503 if not Cnf.has_key("Suite::%s" % (suite)):
504 self.rejects.append("Unknown distribution `%s'." % (suite))
506 ###########################################################################
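# Illustrative SuiteMappings entries (made up for documentation; the real
# values live in the dak configuration) in the forms check_distributions()
# above understands:
#
#   "map stable proposed-updates"              -> retarget, with a note
#   "silent-map oldstable-security oldstable"  -> retarget, without a note
#   "map-unreleased testing unstable"          -> only for unreleased architectures
#   "ignore experimental"                      -> drop the suite with a warning
#   "reject some-suite"                        -> refuse the upload outright
#   "propup-version testing-security testing"  -> record a distribution-version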
508 def binary_file_checks(self, f, session):
510 entry = self.pkg.files[f]
512 # Extract package control information
513 deb_file = utils.open_file(f)
515 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
517 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
519 # Can't continue, none of the checks on control would work.
522 # Check for mandatory "Description:"
525 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
527 self.rejects.append("%s: Missing Description in binary package" % (f))
532 # Check for mandatory fields
533 for field in [ "Package", "Architecture", "Version" ]:
534 if control.Find(field) is None:
536 self.rejects.append("%s: No %s field in control." % (f, field))
539 # Ensure the package name matches the one given in the .changes
540 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
541 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
543 # Validate the package field
544 package = control.Find("Package")
545 if not re_valid_pkg_name.match(package):
546 self.rejects.append("%s: invalid package name '%s'." % (f, package))
548 # Validate the version field
549 version = control.Find("Version")
550 if not re_valid_version.match(version):
551 self.rejects.append("%s: invalid version number '%s'." % (f, version))
553 # Ensure the architecture of the .deb is one we know about.
554 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
555 architecture = control.Find("Architecture")
556 upload_suite = self.pkg.changes["distribution"].keys()[0]
558 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
559 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
560 self.rejects.append("Unknown architecture '%s'." % (architecture))
562 # Ensure the architecture of the .deb is one of the ones
563 # listed in the .changes.
564 if not self.pkg.changes["architecture"].has_key(architecture):
565 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
567 # Sanity-check the Depends field
568 depends = control.Find("Depends")
570 self.rejects.append("%s: Depends field is empty." % (f))
572 # Sanity-check the Provides field
573 provides = control.Find("Provides")
575 provide = re_spacestrip.sub('', provides)
577 self.rejects.append("%s: Provides field is empty." % (f))
578 prov_list = provide.split(",")
579 for prov in prov_list:
580 if not re_valid_pkg_name.match(prov):
581 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
583 # Check the section & priority match those given in the .changes (non-fatal)
584 if control.Find("Section") and entry["section"] != "" \
585 and entry["section"] != control.Find("Section"):
586 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
587 (f, control.Find("Section", ""), entry["section"]))
588 if control.Find("Priority") and entry["priority"] != "" \
589 and entry["priority"] != control.Find("Priority"):
590 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
591 (f, control.Find("Priority", ""), entry["priority"]))
593 entry["package"] = package
594 entry["architecture"] = architecture
595 entry["version"] = version
596 entry["maintainer"] = control.Find("Maintainer", "")
598 if f.endswith(".udeb"):
599 self.pkg.files[f]["dbtype"] = "udeb"
600 elif f.endswith(".deb"):
601 self.pkg.files[f]["dbtype"] = "deb"
603 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
605 entry["source"] = control.Find("Source", entry["package"])
607 # Get the source version
608 source = entry["source"]
611 if source.find("(") != -1:
612 m = re_extract_src_version.match(source)
614 source_version = m.group(2)
616 if not source_version:
617 source_version = self.pkg.files[f]["version"]
619 entry["source package"] = source
620 entry["source version"] = source_version
622 # Ensure the filename matches the contents of the .deb
623 m = re_isadeb.match(f)
626 file_package = m.group(1)
627 if entry["package"] != file_package:
628 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
629 (f, file_package, entry["dbtype"], entry["package"]))
630 epochless_version = re_no_epoch.sub('', control.Find("Version"))
633 file_version = m.group(2)
634 if epochless_version != file_version:
635 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
636 (f, file_version, entry["dbtype"], epochless_version))
639 file_architecture = m.group(3)
640 if entry["architecture"] != file_architecture:
641 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
642 (f, file_architecture, entry["dbtype"], entry["architecture"]))
644 # Check for existing source
645 source_version = entry["source version"]
646 source_package = entry["source package"]
647 if self.pkg.changes["architecture"].has_key("source"):
648 if source_version != self.pkg.changes["version"]:
649 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
650 (source_version, f, self.pkg.changes["version"]))
652 # Check in the SQL database
653 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
654 # Check in one of the other directories
655 source_epochless_version = re_no_epoch.sub('', source_version)
656 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
657 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
659 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
662 dsc_file_exists = False
663 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
664 if cnf.has_key("Dir::Queue::%s" % (myq)):
665 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
666 dsc_file_exists = True
669 if not dsc_file_exists:
670 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
672 # Check the version and for file overwrites
673 self.check_binary_against_db(f, session)
675 # Temporarily disable contents generation until we change the table storage layout
678 #if len(b.rejects) > 0:
679 # for j in b.rejects:
680 # self.rejects.append(j)
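# Illustrative note (not original code): the filename consistency checks above
# rely on re_isadeb splitting "package_version_architecture.(u)deb" into three
# groups.  For a hypothetical file "dak_1.0-1_amd64.deb" that gives
#
#   m.group(1) == "dak"       (compared against the Package field)
#   m.group(2) == "1.0-1"     (compared against the epochless Version)
#   m.group(3) == "amd64"     (compared against the Architecture field)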
682 def source_file_checks(self, f, session):
683 entry = self.pkg.files[f]
685 m = re_issource.match(f)
689 entry["package"] = m.group(1)
690 entry["version"] = m.group(2)
691 entry["type"] = m.group(3)
693 # Ensure the source package name matches the Source field in the .changes
694 if self.pkg.changes["source"] != entry["package"]:
695 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
697 # Ensure the source version matches the version in the .changes file
698 if re_is_orig_source.match(f):
699 changes_version = self.pkg.changes["chopversion2"]
701 changes_version = self.pkg.changes["chopversion"]
703 if changes_version != entry["version"]:
704 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
706 # Ensure the .changes lists source in the Architecture field
707 if not self.pkg.changes["architecture"].has_key("source"):
708 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
710 # Check the signature of a .dsc file
711 if entry["type"] == "dsc":
712 # check_signature returns either:
713 # (None, [list, of, rejects]) or (signature, [])
714 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
716 self.rejects.append(j)
718 entry["architecture"] = "source"
720 def per_suite_file_checks(self, f, suite, session):
722 entry = self.pkg.files[f]
723 archive = utils.where_am_i()
726 if entry.has_key("byhand"):
729 # Check we have fields we need to do these checks
731 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
732 if not entry.has_key(m):
733 self.rejects.append("file '%s' does not have field %s set" % (f, m))
739 # Handle component mappings
740 for m in cnf.ValueList("ComponentMappings"):
741 (source, dest) = m.split()
742 if entry["component"] == source:
743 entry["original component"] = source
744 entry["component"] = dest
746 # Ensure the component is valid for the target suite
747 if cnf.has_key("Suite:%s::Components" % (suite)) and \
748 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
749 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
752 # Validate the component
753 if not get_component(entry["component"], session):
754 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
757 # See if the package is NEW
758 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
761 # Validate the priority
762 if entry["priority"].find('/') != -1:
763 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
765 # Determine the location
766 location = cnf["Dir::Pool"]
767 l = get_location(location, entry["component"], archive, session)
769 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
770 entry["location id"] = -1
772 entry["location id"] = l.location_id
774 # Check the md5sum & size against existing files (if any)
775 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
777 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
778 entry["size"], entry["md5sum"], entry["location id"])
781 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
782 elif found is False and poolfile is not None:
783 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
786 entry["files id"] = None
788 entry["files id"] = poolfile.file_id
790 # Check for packages that have moved from one component to another
791 entry['suite'] = suite
792 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
794 entry["othercomponents"] = res.fetchone()[0]
796 def check_files(self, action=True):
797 file_keys = self.pkg.files.keys()
803 os.chdir(self.pkg.directory)
805 ret = holding.copy_to_holding(f)
807 # XXX: Should we bail out here or try and continue?
808 self.rejects.append(ret)
812 # Check whether we already know the changes file
813 # [NB: this check must be done post-suite mapping]
814 base_filename = os.path.basename(self.pkg.changes_file)
816 session = DBConn().session()
819 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
820 if dbc.in_queue is not None and dbc.in_queue.queue_name != 'unchecked':
821 self.rejects.append("%s file already known to dak" % base_filename)
822 except NoResultFound, e:
829 for f, entry in self.pkg.files.items():
830 # Ensure the file does not already exist in one of the accepted directories
831 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
832 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
833 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
834 self.rejects.append("%s file already exists in the %s directory." % (f, d))
836 if not re_taint_free.match(f):
837 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
839 # Check the file is readable
840 if os.access(f, os.R_OK) == 0:
841 # When running in -n, copy_to_holding() won't have
842 # generated the reject_message, so we need to.
844 if os.path.exists(f):
845 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
847 self.rejects.append("Can't read `%s'. [file not found]" % (f))
848 entry["type"] = "unreadable"
851 # If it's byhand skip remaining checks
852 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
854 entry["type"] = "byhand"
856 # Checks for a binary package...
857 elif re_isadeb.match(f):
859 entry["type"] = "deb"
861 # This routine appends to self.rejects/warnings as appropriate
862 self.binary_file_checks(f, session)
864 # Checks for a source package...
865 elif re_issource.match(f):
868 # This routine appends to self.rejects/warnings as appropriate
869 self.source_file_checks(f, session)
871 # Not a binary or source package? Assume byhand...
874 entry["type"] = "byhand"
876 # Per-suite file checks
877 entry["oldfiles"] = {}
878 for suite in self.pkg.changes["distribution"].keys():
879 self.per_suite_file_checks(f, suite, session)
883 # If the .changes file says it has source, it must have source.
884 if self.pkg.changes["architecture"].has_key("source"):
886 self.rejects.append("no source found and Architecture line in changes mention source.")
888 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
889 self.rejects.append("source only uploads are not supported.")
891 ###########################################################################
892 def check_dsc(self, action=True, session=None):
893 """Returns bool indicating whether or not the source changes are valid"""
894 # Ensure there is source to check
895 if not self.pkg.changes["architecture"].has_key("source"):
900 for f, entry in self.pkg.files.items():
901 if entry["type"] == "dsc":
903 self.rejects.append("can not process a .changes file with multiple .dsc's.")
908 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
910 self.rejects.append("source uploads must contain a dsc file")
913 # Parse the .dsc file
915 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
916 except CantOpenError:
917 # if not -n copy_to_holding() will have done this for us...
919 self.rejects.append("%s: can't read file." % (dsc_filename))
920 except ParseChangesError, line:
921 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
922 except InvalidDscError, line:
923 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
924 except ChangesUnicodeError:
925 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
927 # Build up the list of files mentioned by the .dsc
929 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
930 except NoFilesFieldError:
931 self.rejects.append("%s: no Files: field." % (dsc_filename))
933 except UnknownFormatError, format:
934 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
936 except ParseChangesError, line:
937 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
940 # Enforce mandatory fields
941 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
942 if not self.pkg.dsc.has_key(i):
943 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
946 # Validate the source and version fields
947 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
948 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
949 if not re_valid_version.match(self.pkg.dsc["version"]):
950 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
952 # Only a limited set of source formats is allowed in each suite
953 for dist in self.pkg.changes["distribution"].keys():
954 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
955 if self.pkg.dsc["format"] not in allowed:
956 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
958 # Validate the Maintainer field
960 # We ignore the return value
961 fix_maintainer(self.pkg.dsc["maintainer"])
962 except ParseMaintError, msg:
963 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
964 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
966 # Validate the build-depends field(s)
967 for field_name in [ "build-depends", "build-depends-indep" ]:
968 field = self.pkg.dsc.get(field_name)
970 # Have apt try to parse them...
972 apt_pkg.ParseSrcDepends(field)
974 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
976 # Ensure the version number in the .dsc matches the version number in the .changes
977 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
978 changes_version = self.pkg.files[dsc_filename]["version"]
980 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
981 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
983 # Ensure the Files field contains only what's expected
984 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
986 # Ensure source is newer than existing source in target suites
987 session = DBConn().session()
988 self.check_source_against_db(dsc_filename, session)
989 self.check_dsc_against_db(dsc_filename, session)
994 ###########################################################################
996 def get_changelog_versions(self, source_dir):
997 """Extracts a the source package and (optionally) grabs the
998 version history out of debian/changelog for the BTS."""
1002 # Find the .dsc (again)
1004 for f in self.pkg.files.keys():
1005 if self.pkg.files[f]["type"] == "dsc":
1008 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1009 if not dsc_filename:
1012 # Create a symlink mirror of the source files in our temporary directory
1013 for f in self.pkg.files.keys():
1014 m = re_issource.match(f)
1016 src = os.path.join(source_dir, f)
1017 # If a file is missing for whatever reason, give up.
1018 if not os.path.exists(src):
1021 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1022 self.pkg.orig_files[f].has_key("path"):
1024 dest = os.path.join(os.getcwd(), f)
1025 os.symlink(src, dest)
1027 # If the orig files are not a part of the upload, create symlinks to the
1029 for orig_file in self.pkg.orig_files.keys():
1030 if not self.pkg.orig_files[orig_file].has_key("path"):
1032 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1033 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1035 # Extract the source
1036 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1037 (result, output) = commands.getstatusoutput(cmd)
1039 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1040 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1043 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1046 # Get the upstream version
1047 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1048 if re_strip_revision.search(upstr_version):
1049 upstr_version = re_strip_revision.sub('', upstr_version)
1051 # Ensure the changelog file exists
1052 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1053 if not os.path.exists(changelog_filename):
1054 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1057 # Parse the changelog
1058 self.pkg.dsc["bts changelog"] = ""
1059 changelog_file = utils.open_file(changelog_filename)
1060 for line in changelog_file.readlines():
1061 m = re_changelog_versions.match(line)
1063 self.pkg.dsc["bts changelog"] += line
1064 changelog_file.close()
1066 # Check we found at least one revision in the changelog
1067 if not self.pkg.dsc["bts changelog"]:
1068 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1070 def check_source(self):
1072 # a) there's no source
1073 # or b) the orig files are MIA
1074 if not self.pkg.changes["architecture"].has_key("source") \
1075 or len(self.pkg.orig_files) == 0:
1078 tmpdir = utils.temp_dirname()
1080 # Move into the temporary directory
1084 # Get the changelog version history
1085 self.get_changelog_versions(cwd)
1087 # Move back and clean up the temporary tree
1091 shutil.rmtree(tmpdir)
1093 if e.errno != errno.EACCES:
1095 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1097 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1098 # We probably have u-r or u-w directories so chmod everything
1100 cmd = "chmod -R u+rwx %s" % (tmpdir)
1101 result = os.system(cmd)
1103 utils.fubar("'%s' failed with result %s." % (cmd, result))
1104 shutil.rmtree(tmpdir)
1105 except Exception, e:
1106 print "foobar2 (%s)" % e
1107 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1109 ###########################################################################
1110 def ensure_hashes(self):
1111 # Make sure we recognise the format of the Files: field in the .changes
1112 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1113 if len(format) == 2:
1114 format = int(format[0]), int(format[1])
1116 format = int(float(format[0])), 0
1118 # We need to deal with the original changes blob, as the fields we need
1119 # might not be in the changes dict serialised into the .dak anymore.
1120 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1122 # Copy the checksums over to the current changes dict. This will keep
1123 # the existing modifications to it intact.
1124 for field in orig_changes:
1125 if field.startswith('checksums-'):
1126 self.pkg.changes[field] = orig_changes[field]
1128 # Check for unsupported hashes
1129 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1130 self.rejects.append(j)
1132 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1133 self.rejects.append(j)
1135 # We have to calculate the hash ourselves if the changes format version is older
1136 # than the one in which the hash first appeared, rather than requiring it to exist in the changes file
1137 for hashname, hashfunc, version in utils.known_hashes:
1138 # TODO: Move _ensure_changes_hash into this class
1139 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1140 self.rejects.append(j)
1141 if "source" in self.pkg.changes["architecture"]:
1142 # TODO: Move _ensure_dsc_hash into this class
1143 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1144 self.rejects.append(j)
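# Illustrative example (not original code) of the Format parsing at the top of
# ensure_hashes(): a changes file with "Format: 1.8" yields
#
#   "1.8".split(".", 1)  ->  ["1", "8"]  ->  the tuple (1, 8)
#
# which is then compared against the format version each hash was introduced
# in (see utils.known_hashes) to decide whether the hash has to be calculated
# locally or must already be present in the changes file.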
1146 def check_hashes(self):
1147 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1148 self.rejects.append(m)
1150 for m in utils.check_size(".changes", self.pkg.files):
1151 self.rejects.append(m)
1153 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1154 self.rejects.append(m)
1156 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1157 self.rejects.append(m)
1159 self.ensure_hashes()
1161 ###########################################################################
1163 def ensure_orig(self, target_dir='.', session=None):
1165 Ensures that all orig files mentioned in the changes file are present
1166 in target_dir. If they do not exist, they are symlinked into place.
1168 A list containing the symlinks that were created is returned (so they
1175 for filename, entry in self.pkg.dsc_files.iteritems():
1176 if not re_is_orig_source.match(filename):
1177 # File is not an orig; ignore
1180 if os.path.exists(filename):
1181 # File exists, no need to continue
1184 def symlink_if_valid(path):
1185 f = utils.open_file(path)
1186 md5sum = apt_pkg.md5sum(f)
1189 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1190 expected = (int(entry['size']), entry['md5sum'])
1192 if fingerprint != expected:
1195 dest = os.path.join(target_dir, filename)
1197 os.symlink(path, dest)
1198 symlinked.append(dest)
1204 session_ = DBConn().session()
1209 for poolfile in get_poolfile_like_name('/%s' % filename, session_):
1210 poolfile_path = os.path.join(
1211 poolfile.location.path, poolfile.filename
1214 if symlink_if_valid(poolfile_path):
1224 # Look in some other queues for the file
1225 queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
1226 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1228 for queue in queues:
1229 if not cnf.get('Dir::Queue::%s' % queue):
1232 queuefile_path = os.path.join(
1233 cnf['Dir::Queue::%s' % queue], filename
1236 if not os.path.exists(queuefile_path):
1237 # Does not exist in this queue
1240 if symlink_if_valid(queuefile_path):
1245 ###########################################################################
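# Usage sketch for ensure_orig() (illustrative; check_lintian() below is the
# real caller).  Any symlinks it creates should be removed again afterwards,
# as check_lintian() does:
#
#   symlinked = self.ensure_orig(target_dir='.')
#   ... run whatever needs the orig tarballs ...
#   for symlink in symlinked:
#       os.unlink(symlink)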
1247 def check_lintian(self):
1250 # Don't reject binary uploads
1251 if not self.pkg.changes['architecture'].has_key('source'):
1254 # Only check some distributions
1256 for dist in ('unstable', 'experimental'):
1257 if dist in self.pkg.changes['distribution']:
1264 tagfile = cnf.get("Dinstall::LintianTags")
1266 # We don't have a tagfile, so just don't do anything.
1269 # Parse the yaml file
1270 sourcefile = file(tagfile, 'r')
1271 sourcecontent = sourcefile.read()
1274 lintiantags = yaml.load(sourcecontent)['lintian']
1275 except yaml.YAMLError, msg:
1276 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1279 # Try to find all origs mentioned in the .dsc
1280 symlinked = self.ensure_orig()
1282 # Now set up the input file for lintian. lintian wants "one tag per line" only,
1283 # so write it out that way. We put all types of tags in one file and then sort
1284 # through lintian's output later to see whether a detected tag is fatal or not.
1285 # So we only run lintian once on all tags, even if we might reject on some, but not
1287 # Additionally build up a set of tags
1289 (fd, temp_filename) = utils.temp_filename()
1290 temptagfile = os.fdopen(fd, 'w')
1291 for tagtype in lintiantags:
1292 for tag in lintiantags[tagtype]:
1293 temptagfile.write("%s\n" % tag)
1297 # So now we should look at running lintian on the .changes file, capturing its output
1299 command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
1300 (result, output) = commands.getstatusoutput(command)
1302 # We are done with lintian, remove our tempfile and any symlinks we created
1303 os.unlink(temp_filename)
1304 for symlink in symlinked:
1308 utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
1309 utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
1311 if len(output) == 0:
1316 self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
1318 # We have lintian output, so this package isn't clean. Let's parse it and see if we
1319 # have a candidate for rejection.
1320 # W: tzdata: binary-without-manpage usr/sbin/tzconfig
1321 for line in output.split('\n'):
1322 m = re_parse_lintian.match(line)
1327 epackage = m.group(2)
1331 # So let's check whether we know the tag at all.
1332 if etag not in tags:
1336 # We know it and it is overridden. Check whether the override is allowed.
1337 if etag in lintiantags['warning']:
1338 # The tag is overridden, and it is allowed to be overridden.
1339 # Don't add a reject message.
1341 elif etag in lintiantags['error']:
1342 # The tag is overridden - but is not allowed to be
1343 self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
1344 log("ftpmaster does not allow tag to be overridden", etag)
1346 # Tag is known, it is not overridden, direct reject.
1347 self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
1348 # Now tell them whether they *might* override it.
1349 if etag in lintiantags['warning']:
1350 log("auto rejecting", "overridable", etag)
1351 self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
1353 log("auto rejecting", "not overridable", etag)
1355 ###########################################################################
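# Illustrative sketch (not the real file) of the YAML structure check_lintian()
# above expects from Dinstall::LintianTags: a top-level "lintian" key with
# "error" tags (may never be overridden) and "warning" tags (overridable), e.g.
#
#   lintian:
#     error:
#       - some-fatal-tag
#     warning:
#       - binary-without-manpage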
1356 def check_urgency(self):
1358 if self.pkg.changes["architecture"].has_key("source"):
1359 if not self.pkg.changes.has_key("urgency"):
1360 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1361 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1362 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1363 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1364 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1365 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1367 ###########################################################################
1369 # Sanity check the time stamps of files inside debs.
1370 # [Files in the near future cause ugly warnings and extreme time
1371 # travel can cause errors on extraction]
1373 def check_timestamps(self):
1376 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1377 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1378 tar = TarTime(future_cutoff, past_cutoff)
1380 for filename, entry in self.pkg.files.items():
1381 if entry["type"] == "deb":
1384 deb_file = utils.open_file(filename)
1385 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1388 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1389 except SystemError, e:
1390 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1391 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1394 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1398 future_files = tar.future_files.keys()
1400 num_future_files = len(future_files)
1401 future_file = future_files[0]
1402 future_date = tar.future_files[future_file]
1403 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1404 % (filename, num_future_files, future_file, time.ctime(future_date)))
1406 ancient_files = tar.ancient_files.keys()
1408 num_ancient_files = len(ancient_files)
1409 ancient_file = ancient_files[0]
1410 ancient_date = tar.ancient_files[ancient_file]
1411 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1412 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1414 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1416 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1417 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1419 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1425 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1426 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1427 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1428 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1429 self.pkg.changes["sponsoremail"] = uid_email
1434 ###########################################################################
1435 # check_signed_by_key checks
1436 ###########################################################################
1438 def check_signed_by_key(self):
1439 """Ensure the .changes is signed by an authorized uploader."""
1440 session = DBConn().session()
1442 # First of all we check that the person has proper upload permissions
1443 # and that this upload isn't blocked
1444 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1447 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1450 # TODO: Check that import-keyring adds UIDs properly
1452 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1455 # Check that the fingerprint which uploaded has permission to do so
1456 self.check_upload_permissions(fpr, session)
1458 # Check that this package is not in a transition
1459 self.check_transition(session)
1464 def check_upload_permissions(self, fpr, session):
1465 # Check any one-off upload blocks
1466 self.check_upload_blocks(fpr, session)
1468 # Start with DM, which unfortunately is a special case and has to be checked first
1470 # (keys with no source access get more access than DMs in one
1471 # way; DMs can only upload for their packages whether source
1472 # or binary, whereas keys with no access might be able to
1473 # upload some binaries)
1474 if fpr.source_acl.access_level == 'dm':
1475 self.check_dm_upload(fpr, session)
1477 # Check source-based permissions for other types
1478 if self.pkg.changes["architecture"].has_key("source"):
1479 if fpr.source_acl.access_level is None:
1480 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1481 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1482 self.rejects.append(rej)
1485 # If not a DM, we allow full upload rights
1486 uid_email = "%s@debian.org" % (fpr.uid.uid)
1487 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1490 # Check binary upload permissions
1491 # By this point we know that DMs can't have got here unless they
1492 # are allowed to deal with the package concerned so just apply
1494 if fpr.binary_acl.access_level == 'full':
1497 # Otherwise we're in the map case
1498 tmparches = self.pkg.changes["architecture"].copy()
1499 tmparches.pop('source', None)
1501 for bam in fpr.binary_acl_map:
1502 tmparches.pop(bam.architecture.arch_string, None)
1504 if len(tmparches.keys()) > 0:
1505 if fpr.binary_reject:
1506 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1507 rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
1508 self.rejects.append(rej)
1510 # TODO: This is where we'll implement reject vs throw away binaries later
1511 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1512 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1513 rej += "\nFingerprint: %s", (fpr.fingerprint)
1514 self.rejects.append(rej)
1517 def check_upload_blocks(self, fpr, session):
1518 """Check whether any upload blocks apply to this source, source
1519 version, uid / fpr combination"""
1521 def block_rej_template(fb):
1522 rej = 'Manual upload block in place for package %s' % fb.source
1523 if fb.version is not None:
1524 rej += ', version %s' % fb.version
1527 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1528 # version is None if the block applies to all versions
1529 if fb.version is None or fb.version == self.pkg.changes['version']:
1530 # Check both fpr and uid - either is enough to cause a reject
1531 if fb.fpr is not None:
1532 if fb.fpr.fingerprint == fpr.fingerprint:
1533 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1534 if fb.uid is not None:
1535 if fb.uid == fpr.uid:
1536 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1539 def check_dm_upload(self, fpr, session):
1540 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1541 ## none of the uploaded packages are NEW
1543 for f in self.pkg.files.keys():
1544 if self.pkg.files[f].has_key("byhand"):
1545 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1547 if self.pkg.files[f].has_key("new"):
1548 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1554 ## the most recent version of the package uploaded to unstable or
1555 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1556 ## section of its control file
1557 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1558 q = q.join(SrcAssociation)
1559 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1560 q = q.order_by(desc('source.version')).limit(1)
1565 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1566 self.rejects.append(rej)
1570 if not r.dm_upload_allowed:
1571 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1572 self.rejects.append(rej)
1575 ## the Maintainer: field of the uploaded .changes file corresponds with
1576 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1578 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1579 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1581 ## the most recent version of the package uploaded to unstable or
1582 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1583 ## non-developer maintainers cannot NMU or hijack packages)
1585 # srcuploaders includes the maintainer
1587 for sup in r.srcuploaders:
1588 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1589 # Eww - I hope we never have two people with the same name in Debian
1590 if email == fpr.uid.uid or name == fpr.uid.name:
1595 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1598 ## none of the packages are being taken over from other source packages
1599 for b in self.pkg.changes["binary"].keys():
1600 for suite in self.pkg.changes["distribution"].keys():
1601 q = session.query(DBSource)
1602 q = q.join(DBBinary).filter_by(package=b)
1603 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1606 if s.source != self.pkg.changes["source"]:
1607 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1611 def check_transition(self, session):
1614 sourcepkg = self.pkg.changes["source"]
1616 # No sourceful upload -> no need to do anything else, direct return
1617 # We also only care about unstable uploads, not experimental ones or those going to some
1618 # proposed-updates queue
1619 if "source" not in self.pkg.changes["architecture"] or \
1620 "unstable" not in self.pkg.changes["distribution"]:
1623 # Also only check if there is a file defined (and existent) with
1625 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1626 if transpath == "" or not os.path.exists(transpath):
1629 # Parse the yaml file
1630 sourcefile = file(transpath, 'r')
1631 sourcecontent = sourcefile.read()
1633 transitions = yaml.load(sourcecontent)
1634 except yaml.YAMLError, msg:
1635 # This shouldn't happen, there is a wrapper to edit the file which
1636 # checks it, but we would rather be safe than end up rejecting
1638 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1641 # Now look through all defined transitions
1642 for trans in transitions:
1643 t = transitions[trans]
1644 source = t["source"]
1647 # Will be None if nothing is in testing.
1648 current = get_source_in_suite(source, "testing", session)
1649 if current is not None:
1650 compare = apt_pkg.VersionCompare(current.version, expected)
1652 if current is None or compare < 0:
1653 # This is still valid, the current version in testing is older than
1654 # the new version we wait for, or there is none in testing yet
1656 # Check if the source we look at is affected by this.
1657 if sourcepkg in t['packages']:
1658 # The source is affected, let's reject it.
1660 rejectmsg = "%s: part of the %s transition.\n\n" % (
1663 if current is not None:
1664 currentlymsg = "at version %s" % (current.version)
1666 currentlymsg = "not present in testing"
1668 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1670 rejectmsg += "\n".join(textwrap.wrap("""Your package
1671 is part of a testing transition designed to get %s migrated (it is
1672 currently %s, we need version %s). This transition is managed by the
1673 Release Team, and %s is the Release-Team member responsible for it.
1674 Please mail debian-release@lists.debian.org or contact %s directly if you
1675 need further assistance. You might want to upload to experimental until this
1676 transition is done."""
1677 % (source, currentlymsg, expected,t["rm"], t["rm"])))
1679 self.rejects.append(rejectmsg)
1682 ###########################################################################
1683 # End check_signed_by_key checks
1684 ###########################################################################
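# Illustrative sketch (not a real transitions file) of the YAML structure that
# check_transition() above reads from Dinstall::Reject::ReleaseTransitions.
# The transition name, versions and people below are made up, and the key
# holding the awaited version is assumed here to be "new":
#
#   libfoo-transition:
#     source: libfoo
#     new: 2.0-1
#     rm: Some Release Manager
#     reason: "libfoo soname bump"
#     packages:
#       - bar
#       - baz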
1686 def build_summaries(self):
1687 """ Build a summary of changes the upload introduces. """
1689 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1691 short_summary = summary
1693 # This is for direport's benefit...
1694 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1697 summary += "Changes: " + f
1699 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1701 summary += self.announce(short_summary, 0)
1703 return (summary, short_summary)
1705 ###########################################################################
1707 def close_bugs(self, summary, action):
1709 Send mail to close bugs as instructed by the closes field in the changes file.
1710 Also add a line to summary if any work was done.
1712 @type summary: string
1713 @param summary: summary text, as given by L{build_summaries}
1716 @param action: If set to false, no real action will be done.
1719 @return: summary. If action was taken, extended by the list of closed bugs.
1723 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1725 bugs = self.pkg.changes["closes"].keys()
1731 summary += "Closing bugs: "
1733 summary += "%s " % (bug)
1736 self.Subst["__BUG_NUMBER__"] = bug
1737 if self.pkg.changes["distribution"].has_key("stable"):
1738 self.Subst["__STABLE_WARNING__"] = """
1739 Note that this package is not part of the released stable Debian
1740 distribution. It may have dependencies on other unreleased software,
1741 or other instabilities. Please take care if you wish to install it.
1742 The update will eventually make its way into the next released Debian
1745 self.Subst["__STABLE_WARNING__"] = ""
1746 mail_message = utils.TemplateSubst(self.Subst, template)
1747 utils.send_mail(mail_message)
1749 # Clear up after ourselves
1750 del self.Subst["__BUG_NUMBER__"]
1751 del self.Subst["__STABLE_WARNING__"]
1753 if action and self.logger:
1754 self.logger.log(["closing bugs"] + bugs)
1760 ###########################################################################
1762 def announce(self, short_summary, action):
1764 Send an announce mail about a new upload.
1766 @type short_summary: string
1767 @param short_summary: Short summary text to include in the mail
1770 @param action: If set to false, no real action will be done.
1773 @return: Text string describing the action taken.
1778 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1780 # Only do announcements for source uploads with a recent dpkg-dev installed
1781 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1782 self.pkg.changes["architecture"].has_key("source"):
1788 self.Subst["__SHORT_SUMMARY__"] = short_summary
1790 for dist in self.pkg.changes["distribution"].keys():
1791 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1792 if announce_list == "" or lists_done.has_key(announce_list):
1795 lists_done[announce_list] = 1
1796 summary += "Announcing to %s\n" % (announce_list)
1800 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1801 if cnf.get("Dinstall::TrackingServer") and \
1802 self.pkg.changes["architecture"].has_key("source"):
1803 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1804 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1806 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1807 utils.send_mail(mail_message)
1809 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1811 if cnf.FindB("Dinstall::CloseBugs"):
1812 summary = self.close_bugs(summary, action)
1814 del self.Subst["__SHORT_SUMMARY__"]
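# A hedged sketch of the configuration this method consults; the suite
# name, list address and tracking host below are illustrative examples,
# not guaranteed dak defaults:
#
#   Suite::unstable::Announce "debian-devel-changes@lists.debian.org";
#   Dinstall::TrackingServer "packages.qa.debian.org";
#
# With settings like these, a sourceful upload to unstable is announced to
# the list and additionally Bcc'd to <sourcepackage>@<TrackingServer>.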
1818 ###########################################################################
1820 def accept (self, summary, short_summary, session=None):
1824 This moves all files referenced from the .changes into the pool,
1825 sends the accepted mail, announces to lists, closes bugs and
1826 also checks for override disparities. If enabled it will write out
1827 the version history for the BTS Version Tracking and will finally call
1830 @type summary: string
1831 @param summary: Summary text
1833 @type short_summary: string
1834 @param short_summary: Short summary
1838 stats = SummaryStats()
1841 self.logger.log(["installing changes", self.pkg.changes_file])
1843 # Add the .dsc file to the DB first
1844 for newfile, entry in self.pkg.files.items():
1845 if entry["type"] == "dsc":
1846 dsc_component, dsc_location_id = add_dsc_to_db(self, newfile, session)
1848 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1849 for newfile, entry in self.pkg.files.items():
1850 if entry["type"] == "deb":
1851 add_deb_to_db(self, newfile, session)
1853 # If this is a sourceful, diff-only upload that is moving
1854 # cross-component, we need to copy the .orig files into the new
1855 # component too, for the same reasons as above.
1856 if self.pkg.changes["architecture"].has_key("source"):
1857 for orig_file in self.pkg.orig_files.keys():
1858 if not self.pkg.orig_files[orig_file].has_key("id"):
1859 continue # Skip if it's not in the pool
1860 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1861 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1862 continue # Skip if the location didn't change
1865 oldf = get_poolfile_by_id(orig_file_id, session)
1866 old_filename = os.path.join(oldf.location.path, oldf.filename)
1867 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1868 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1870 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
1872 # TODO: Care about size/md5sum collisions etc
1873 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1876 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1877 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1879 # TODO: Check that there's only 1 here
1880 source = get_sources_from_name(self.pkg.changes["source"], self.pkg.changes["version"])[0]
1881 dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
1882 dscf.poolfile_id = newf.file_id
1886 # Install the files into the pool
1887 for newfile, entry in self.pkg.files.items():
1888 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1889 utils.move(newfile, destination)
1890 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1891 stats.accept_bytes += float(entry["size"])
1893 # Copy the .changes file across for suites which need it.
1895 for suite_name in self.pkg.changes["distribution"].keys():
1896 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1897 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1899 for dest in copy_changes.keys():
1900 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
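# Illustrative only: a suite's CopyChanges value names a directory
# (relative to Dir::Root) that receives a copy of the .changes file,
# e.g. a hypothetical
#
#   Suite::proposed-updates::CopyChanges "dists/proposed-updates/";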
1902 # We're done - commit the database changes
1904 # Our SQL session will automatically start a new transaction after the commit.
1907 # Move the .changes into the 'done' directory
1908 utils.move(self.pkg.changes_file,
1909 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1911 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1912 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1914 # Send accept mail, announce to lists, close bugs and check for
1915 # override disparities
1916 if not cnf["Dinstall::Options::No-Mail"]:
1918 self.Subst["__SUITE__"] = ""
1919 self.Subst["__SUMMARY__"] = summary
1920 mail_message = utils.TemplateSubst(self.Subst,
1921 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1922 utils.send_mail(mail_message)
1923 self.announce(short_summary, 1)
1925 ## Helper stuff for DebBugs Version Tracking
1926 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1927 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1928 # the conditionalization on dsc["bts changelog"] should be removed.
1931 # Write out the version history from the changelog
1932 if self.pkg.changes["architecture"].has_key("source") and \
1933 self.pkg.dsc.has_key("bts changelog"):
1935 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1936 version_history = os.fdopen(fd, 'w')
1937 version_history.write(self.pkg.dsc["bts changelog"])
1938 version_history.close()
1939 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1940 self.pkg.changes_file[:-8]+".versions")
1941 os.rename(temp_filename, filename)
1942 os.chmod(filename, 0644)
1944 # Write out the binary -> source mapping.
1945 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1946 debinfo = os.fdopen(fd, 'w')
1947 for name, entry in sorted(self.pkg.files.items()):
1948 if entry["type"] == "deb":
1949 line = " ".join([entry["package"], entry["version"],
1950 entry["architecture"], entry["source package"],
1951 entry["source version"]])
1952 debinfo.write(line+"\n")
1954 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1955 self.pkg.changes_file[:-8]+".debinfo")
1956 os.rename(temp_filename, filename)
1957 os.chmod(filename, 0644)
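# For illustration (package names and versions are made up): an upload of
# foo_1.2-1_amd64.changes carrying binaries foo and foo-doc would produce
#
#   foo_1.2-1_amd64.versions  -- the "bts changelog" field copied from the .dsc
#   foo_1.2-1_amd64.debinfo:
#       foo 1.2-1 amd64 foo 1.2-1
#       foo-doc 1.2-1 all foo 1.2-1
#
# i.e. one "package version architecture source-package source-version"
# line per binary, exactly as written by the loop above.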
1959 # This routine returns None on success or an error on failure
1960 # TODO: Replace queue copying with the new queue.add_file_from_pool routine
1961 # and by looking up the target queues in suite.copy_queues
1962 #res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
1969 stats.accept_count += 1
1971 def check_override(self):
1973 Checks override entries for validity. Mails "Override disparity" warnings,
1974 if that feature is enabled.
1976 Abandons the check if
1977 - override disparity checks are disabled
1978 - mail sending is disabled
1983 # Abandon the check if:
1984 # a) override disparity checks have been disabled
1985 # b) we're not sending mail
1986 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1987 cnf["Dinstall::Options::No-Mail"]:
1990 summary = self.pkg.check_override()
1995 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
1998 self.Subst["__SUMMARY__"] = summary
1999 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2000 utils.send_mail(mail_message)
2001 del self.Subst["__SUMMARY__"]
2003 ###########################################################################
2005 def remove(self, from_dir=None):
2007 Used (for instance) in p-u to remove the package from unchecked.
2009 Also removes the package from the holding area.
2011 if from_dir is None:
2012 from_dir = self.pkg.directory
2015 for f in self.pkg.files.keys():
2016 os.unlink(os.path.join(from_dir, f))
2017 if os.path.exists(os.path.join(h.holding_dir, f)):
2018 os.unlink(os.path.join(h.holding_dir, f))
2020 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2021 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2022 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2024 ###########################################################################
2026 def move_to_dir (self, dest, perms=0660, changesperms=0664):
2028 Move files to dest with certain perms/changesperms
2031 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2032 dest, perms=changesperms)
2033 for f in self.pkg.files.keys():
2034 utils.move(os.path.join(h.holding_dir, f), dest, perms=perms)
2036 ###########################################################################
2038 def force_reject(self, reject_files):
2040 Forcefully move files from the current directory to the
2041 reject directory. If any file already exists in the reject
2042 directory, it will be moved to the morgue to make way for the new file.
2046 @param reject_files: list of file names to move to the reject directory
2052 for file_entry in reject_files:
2053 # Skip any files which don't exist or which we don't have permission to copy.
2054 if os.access(file_entry, os.R_OK) == 0:
2057 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2060 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2062 # File exists? Let's try and move it to the morgue
2063 if e.errno == errno.EEXIST:
2064 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2066 morgue_file = utils.find_next_free(morgue_file)
2067 except NoFreeFilenameError:
2068 # Something's either gone badly Pete Tong, or
2069 # someone is trying to exploit us.
2070 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2072 utils.move(dest_file, morgue_file, perms=0660)
2074 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2077 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2081 # If we got here, we own the destination file, so we can
2082 # safely overwrite it.
2083 utils.move(file_entry, dest_file, 1, perms=0660)
2086 ###########################################################################
2087 def do_reject (self, manual=0, reject_message="", note=""):
2089 Reject an upload. If called without a reject message or C{manual} is
2090 true, spawn an editor so the user can write one.
2093 @param manual: manual or automated rejection
2095 @type reject_message: string
2096 @param reject_message: A reject message
2101 # If we weren't given a manual rejection message, spawn an
2102 # editor so the user can add one in...
2103 if manual and not reject_message:
2104 (fd, temp_filename) = utils.temp_filename()
2105 temp_file = os.fdopen(fd, 'w')
2108 temp_file.write(line)
2110 editor = os.environ.get("EDITOR","vi")
2112 while answer == 'E':
2113 os.system("%s %s" % (editor, temp_filename))
2114 temp_fh = utils.open_file(temp_filename)
2115 reject_message = "".join(temp_fh.readlines())
2117 print "Reject message:"
2118 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2119 prompt = "[R]eject, Edit, Abandon, Quit ?"
2121 while prompt.find(answer) == -1:
2122 answer = utils.our_raw_input(prompt)
2123 m = re_default_answer.search(prompt)
2126 answer = answer[:1].upper()
2127 os.unlink(temp_filename)
2133 print "Rejecting.\n"
2137 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2138 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2140 # Move all the files into the reject directory
2141 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2142 self.force_reject(reject_files)
2144 # If we fail here someone is probably trying to exploit the race
2145 # so let's just raise an exception ...
2146 if os.path.exists(reason_filename):
2147 os.unlink(reason_filename)
2148 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2150 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2154 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2155 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2156 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2157 os.write(reason_fd, reject_message)
2158 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2160 # Build up the rejection email
2161 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2162 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2163 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2164 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2165 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2166 # Write the rejection email out as the <foo>.reason file
2167 os.write(reason_fd, reject_mail_message)
2169 del self.Subst["__REJECTOR_ADDRESS__"]
2170 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2171 del self.Subst["__CC__"]
2175 # Send the rejection mail if appropriate
2176 if not cnf["Dinstall::Options::No-Mail"]:
2177 utils.send_mail(reject_mail_message)
2180 self.logger.log(["rejected", self.pkg.changes_file])
2184 ################################################################################
2185 def in_override_p(self, package, component, suite, binary_type, filename, session):
2187 Check if a package already has override entries in the DB
2189 @type package: string
2190 @param package: package name
2192 @type component: string
2193 @param component: name of the component
2196 @param suite: name of the suite
2198 @type binary_type: string
2199 @param binary_type: type of the package
2201 @type filename: string
2202 @param filename: filename we check
2204 @return: the database result. But no one cares anyway.
2210 if binary_type == "": # must be source
2213 file_type = binary_type
2215 # Override suite name; used for example with proposed-updates
2216 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2217 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2219 result = get_override(package, suite, component, file_type, session)
2221 # If checking for a source package, fall back on the binary override types
2222 if file_type == "dsc" and len(result) < 1:
2223 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2225 # Remember the section and priority so we can check them later if appropriate
2228 self.pkg.files[filename]["override section"] = result.section.section
2229 self.pkg.files[filename]["override priority"] = result.priority.priority
2234 ################################################################################
2235 def get_anyversion(self, sv_list, suite):
2238 @param sv_list: list of (suite, version) tuples to check
2241 @param suite: suite name
2247 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2248 for (s, v) in sv_list:
2249 if s in [ x.lower() for x in anysuite ]:
2250 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
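# A small worked example (suite names, versions and the config value are
# hypothetical): with
#   Suite::testing-proposed-updates::VersionChecks::Enhances { "testing"; };
# calling
#   self.get_anyversion([("testing", "1.0-1"),
#                        ("testing-proposed-updates", "1.0-2"),
#                        ("unstable", "2.0-1")],
#                       "testing-proposed-updates")
# only considers the suite itself plus the suites it enhances, so the
# unstable entry is ignored and the highest remaining version, "1.0-2",
# is returned.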
2255 ################################################################################
2257 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2260 @param sv_list: list of (suite, version) tuples to check
2262 @type filename: string
2263 @param filename: name of the file being checked; used in reject and warning messages
2265 @type new_version: string
2266 @param new_version: version of the uploaded package being checked
2268 Ensure versions are newer than existing packages in target
2269 suites and that cross-suite version checking rules as
2270 set out in the conf file are satisfied.
2275 # Check versions for each target suite
2276 for target_suite in self.pkg.changes["distribution"].keys():
2277 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2278 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2280 # Enforce "must be newer than target suite" even if conffile omits it
2281 if target_suite not in must_be_newer_than:
2282 must_be_newer_than.append(target_suite)
2284 for (suite, existent_version) in sv_list:
2285 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2287 if suite in must_be_newer_than and sourceful and vercmp < 1:
2288 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2290 if suite in must_be_older_than and vercmp > -1:
2293 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2294 # we really use the other suite, ignoring the conflicting one ...
2295 addsuite = self.pkg.changes["distribution-version"][suite]
2297 add_version = self.get_anyversion(sv_list, addsuite)
2298 target_version = self.get_anyversion(sv_list, target_suite)
2301 # not add_version can only happen if we map to a suite
2302 # that doesn't enhance the suite we're propup'ing from.
2303 # so "propup-ver x a b c; map a d" is a problem only if
2304 # d doesn't enhance a.
2306 # i think we could always propagate in this case, rather
2307 # than complaining. either way, this isn't a REJECT issue
2309 # And - we really should complain to the dorks who configured dak
2310 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2311 self.pkg.changes.setdefault("propdistribution", {})
2312 self.pkg.changes["propdistribution"][addsuite] = 1
2314 elif not target_version:
2315 # not target_version is true when the package is NEW
2316 # we could just stick with the "...old version..." REJECT
2317 # for this, I think.
2318 self.rejects.append("Won't propogate NEW packages.")
2319 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2320 # propagation would be redundant. no need to reject though.
2321 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2323 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2324 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2326 self.warnings.append("Propogating upload to %s" % (addsuite))
2327 self.pkg.changes.setdefault("propdistribution", {})
2328 self.pkg.changes["propdistribution"][addsuite] = 1
2332 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
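# A hedged worked example (package, versions and configuration are
# hypothetical): a sourceful upload of foo 1.0-2 to unstable while unstable
# already has foo 1.0-3 trips the implicit "must be newer than the target
# suite" rule above and is rejected.  With something like
#   Suite::unstable::VersionChecks::MustBeOlderThan { "experimental"; };
# an unstable upload that is not older than the version in experimental is
# normally rejected as well, unless changes["distribution-version"] maps
# experimental to another suite, in which case the upload may instead be
# scheduled for propagation there via changes["propdistribution"].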
2334 ################################################################################
2335 def check_binary_against_db(self, filename, session):
2336 # Ensure version is sane
2337 q = session.query(BinAssociation)
2338 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2339 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2341 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2342 filename, self.pkg.files[filename]["version"], sourceful=False)
2344 # Check for any existing copies of the file
2345 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2346 q = q.filter_by(version=self.pkg.files[filename]["version"])
2347 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2350 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2352 ################################################################################
2354 def check_source_against_db(self, filename, session):
2357 source = self.pkg.dsc.get("source")
2358 version = self.pkg.dsc.get("version")
2360 # Ensure version is sane
2361 q = session.query(SrcAssociation)
2362 q = q.join(DBSource).filter(DBSource.source==source)
2364 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2365 filename, version, sourceful=True)
2367 ################################################################################
2368 def check_dsc_against_db(self, filename, session):
2371 @warning: NB: this function can remove entries from the 'files' index [if
2372 the orig tarball is a duplicate of the one in the archive]; if
2373 you're iterating over 'files' and call this function as part of
2374 the loop, be sure to add a check to the top of the loop to
2375 ensure you haven't just tried to dereference the deleted entry.
2380 self.pkg.orig_files = {} # XXX: do we need to clear it?
2381 orig_files = self.pkg.orig_files
2383 # Try and find all files mentioned in the .dsc. This has
2384 # to work harder to cope with the multiple possible
2385 # locations of an .orig.tar.gz.
2386 # The ordering on the select is needed to pick the newest orig
2387 # when it exists in multiple places.
2388 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2390 if self.pkg.files.has_key(dsc_name):
2391 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2392 actual_size = int(self.pkg.files[dsc_name]["size"])
2393 found = "%s in incoming" % (dsc_name)
2395 # Check the file does not already exist in the archive
2396 ql = get_poolfile_like_name(dsc_name, session)
2398 # Strip out anything that isn't '%s' or '/%s$'
2400 if not i.filename.endswith(dsc_name):
2403 # "[dak] has not broken them. [dak] has fixed a
2404 # brokenness. Your crappy hack exploited a bug in
2407 # "(Come on! I thought it was always obvious that
2408 # one just doesn't release different files with
2409 # the same name and version.)"
2410 # -- ajk@ on d-devel@l.d.o
2413 # Ignore exact matches for .orig.tar.gz
2415 if re_is_orig_source.match(dsc_name):
2417 if self.pkg.files.has_key(dsc_name) and \
2418 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2419 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2420 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2421 # TODO: Don't delete the entry, just mark it as not needed
2422 # This would fix the stupidity of changing something we often iterate over
2423 # whilst we're doing it
2424 del self.pkg.files[dsc_name]
2425 dsc_entry["files id"] = i.file_id
2426 if not orig_files.has_key(dsc_name):
2427 orig_files[dsc_name] = {}
2428 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2432 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2434 elif re_is_orig_source.match(dsc_name):
2436 ql = get_poolfile_like_name(dsc_name, session)
2438 # Strip out anything that isn't '%s' or '/%s$'
2439 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2441 if not i.filename.endswith(dsc_name):
2445 # Unfortunately, we may get more than one match here if,
2446 # for example, the package was in potato but had an -sa
2447 # upload in woody. So we need to choose the right one.
2449 # default to something sane in case we don't match any or have only one
2454 old_file = os.path.join(i.location.path, i.filename)
2455 old_file_fh = utils.open_file(old_file)
2456 actual_md5 = apt_pkg.md5sum(old_file_fh)
2458 actual_size = os.stat(old_file)[stat.ST_SIZE]
2459 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2462 old_file = os.path.join(i.location.path, i.filename)
2463 old_file_fh = utils.open_file(old_file)
2464 actual_md5 = apt_pkg.md5sum(old_file_fh)
2466 actual_size = os.stat(old_file)[stat.ST_SIZE]
2468 suite_type = x.location.archive_type
2469 # need this for updating dsc_files in install()
2470 dsc_entry["files id"] = x.file_id
2471 # See install() in process-accepted...
2472 if not orig_files.has_key(dsc_name):
2473 orig_files[dsc_name] = {}
2474 orig_files[dsc_name]["id"] = x.file_id
2475 orig_files[dsc_name]["path"] = old_file
2476 orig_files[dsc_name]["location"] = x.location.location_id
2478 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2479 # Not there? Check the queue directories...
2480 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2481 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2483 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2484 if os.path.exists(in_otherdir):
2485 in_otherdir_fh = utils.open_file(in_otherdir)
2486 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2487 in_otherdir_fh.close()
2488 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2490 if not orig_files.has_key(dsc_name):
2491 orig_files[dsc_name] = {}
2492 orig_files[dsc_name]["path"] = in_otherdir
2495 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2498 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2500 if actual_md5 != dsc_entry["md5sum"]:
2501 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2502 if actual_size != int(dsc_entry["size"]):
2503 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2505 ################################################################################
2506 # This is used by process-new and process-holding to recheck a changes file
2507 # at the time we're running. It mainly wraps various other internal functions
2508 # and is similar to accepted_checks - these should probably be tidied up
2510 def recheck(self, session):
2512 for f in self.pkg.files.keys():
2513 # The .orig.tar.gz can disappear out from under us if it's a
2514 # duplicate of one in the archive.
2515 if not self.pkg.files.has_key(f):
2518 entry = self.pkg.files[f]
2520 # Check that the source still exists
2521 if entry["type"] == "deb":
2522 source_version = entry["source version"]
2523 source_package = entry["source package"]
2524 if not self.pkg.changes["architecture"].has_key("source") \
2525 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2526 source_epochless_version = re_no_epoch.sub('', source_version)
2527 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2529 for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
2530 if cnf.has_key("Dir::Queue::%s" % (q)):
2531 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2534 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2536 # Version and file overwrite checks
2537 if entry["type"] == "deb":
2538 self.check_binary_against_db(f, session)
2539 elif entry["type"] == "dsc":
2540 self.check_source_against_db(f, session)
2541 self.check_dsc_against_db(f, session)
2543 ################################################################################
2544 def accepted_checks(self, overwrite_checks, session):
2545 # Recheck anything that relies on the database, since that's not
2546 # frozen between accept and our run time when called from p-a.
2548 # overwrite_checks is set to False when installing to stable/oldstable
2553 # Find the .dsc (again)
2555 for f in self.pkg.files.keys():
2556 if self.pkg.files[f]["type"] == "dsc":
2559 for checkfile in self.pkg.files.keys():
2560 # The .orig.tar.gz can disappear out from under us if it's a
2561 # duplicate of one in the archive.
2562 if not self.pkg.files.has_key(checkfile):
2565 entry = self.pkg.files[checkfile]
2567 # Check that the source still exists
2568 if entry["type"] == "deb":
2569 source_version = entry["source version"]
2570 source_package = entry["source package"]
2571 if not self.pkg.changes["architecture"].has_key("source") \
2572 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2573 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2575 # Version and file overwrite checks
2576 if overwrite_checks:
2577 if entry["type"] == "deb":
2578 self.check_binary_against_db(checkfile, session)
2579 elif entry["type"] == "dsc":
2580 self.check_source_against_db(checkfile, session)
2581 self.check_dsc_against_db(dsc_filename, session)
2583 # propagate in the case it is in the override tables:
2584 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2585 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2586 propogate[suite] = 1
2588 nopropogate[suite] = 1
2590 for suite in propogate.keys():
2591 if suite in nopropogate:
2593 self.pkg.changes["distribution"][suite] = 1
2595 for checkfile in self.pkg.files.keys():
2596 # Check the package is still in the override tables
2597 for suite in self.pkg.changes["distribution"].keys():
2598 if not self.in_override_p(self.pkg.files[checkfile]["package"], self.pkg.files[checkfile]["component"], suite, self.pkg.files[checkfile].get("dbtype",""), checkfile, session):
2599 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2601 ################################################################################
2602 # This is not really a reject, but an unaccept, but since a) the code for
2603 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2604 # extremely rare, for now we'll go with whining at our admin folks...
2606 def do_unaccept(self):
2610 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2611 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2612 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2613 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2614 if cnf.has_key("Dinstall::Bcc"):
2615 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2617 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2619 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2621 # Write the rejection email out as the <foo>.reason file
2622 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2623 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2625 # If we fail here someone is probably trying to exploit the race
2626 # so let's just raise an exception ...
2627 if os.path.exists(reject_filename):
2628 os.unlink(reject_filename)
2630 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2631 os.write(fd, reject_mail_message)
2634 utils.send_mail(reject_mail_message)
2636 del self.Subst["__REJECTOR_ADDRESS__"]
2637 del self.Subst["__REJECT_MESSAGE__"]
2638 del self.Subst["__CC__"]
2640 ################################################################################
2641 # If any file of an upload has a recent mtime then chances are good
2642 # the file is still being uploaded.
2644 def upload_too_new(self):
2647 # Move back to the original directory to get accurate time stamps
2649 os.chdir(self.pkg.directory)
2650 file_list = self.pkg.files.keys()
2651 file_list.extend(self.pkg.dsc_files.keys())
2652 file_list.append(self.pkg.changes_file)
2655 last_modified = time.time() - os.path.getmtime(f)
2656 if last_modified < int(cnf["Dinstall::SkipTime"]):