5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files
55 from textutils import fix_maintainer
56 from binary import Binary
58 ###############################################################################
60 def get_type(f, session):
62 Get the file type of C{f}
65 @param f: file entry from Changes object
67 @type session: SQLA Session
68 @param session: SQL Alchemy session object
75 if f.has_key("dbtype"):
76 file_type = f["dbtype"]
77 elif re_source_ext.match(f["type"]):
80 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
82 # Validate the override type
83 type_id = get_override_type(file_type, session)
85 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
89 ################################################################################
91 # Determine what parts in a .changes are NEW
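# Illustrative usage sketch (not part of dak itself): determine_new() is fed
# the parsed dictionaries held on an Upload's pkg object once load_changes()
# and check_files() have run. The path and loop below are hypothetical.
#
#   upload = Upload()
#   upload.load_changes("/srv/queue/unchecked/foo_1.0-1_amd64.changes")
#   new = determine_new(upload.pkg.changes, upload.pkg.files, warn=0)
#   for pkg in new.keys():
#       print "%s: %s/%s (%s)" % (pkg, new[pkg]["component"],
#                                 new[pkg]["section"], new[pkg]["priority"])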
93 def determine_new(changes, files, warn=1):
95 Determine what parts in a C{changes} file are NEW.
97 @type changes: Upload.Pkg.changes dict
98 @param changes: Changes dictionary
100 @type files: Upload.Pkg.files dict
101 @param files: Files dictionary
104 @param warn: Warn if overrides are added for (old)stable
107 @return: dictionary of NEW components.
112 session = DBConn().session()
114 # Build up a list of potentially new things
115 for name, f in files.items():
116 # Skip byhand elements
117 if f["type"] == "byhand":
120 priority = f["priority"]
121 section = f["section"]
122 file_type = get_type(f, session)
123 component = f["component"]
125 if file_type == "dsc":
128 if not new.has_key(pkg):
130 new[pkg]["priority"] = priority
131 new[pkg]["section"] = section
132 new[pkg]["type"] = file_type
133 new[pkg]["component"] = component
134 new[pkg]["files"] = []
136 old_type = new[pkg]["type"]
137 if old_type != file_type:
138 # source gets trumped by deb or udeb
139 if old_type == "dsc":
140 new[pkg]["priority"] = priority
141 new[pkg]["section"] = section
142 new[pkg]["type"] = file_type
143 new[pkg]["component"] = component
145 new[pkg]["files"].append(name)
147 if f.has_key("othercomponents"):
148 new[pkg]["othercomponents"] = f["othercomponents"]
150 for suite in changes["suite"].keys():
151 for pkg in new.keys():
152 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
154 for file_entry in new[pkg]["files"]:
155 if files[file_entry].has_key("new"):
156 del files[file_entry]["new"]
160 for s in ['stable', 'oldstable']:
161 if changes["suite"].has_key(s):
162 print "WARNING: overrides will be added for %s!" % s
163 for pkg in new.keys():
164 if new[pkg].has_key("othercomponents"):
165 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
171 ################################################################################
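# For orientation, each entry in the dict that determine_new() builds and that
# check_valid() consumes has roughly this shape (the values are only an example):
#
#   new["foo"] = {
#       "priority": "optional",
#       "section": "utils",
#       "type": "deb",
#       "component": "main",
#       "files": ["foo_1.0-1_amd64.deb"],
#   }
#
# check_valid() then adds "section id" and "priority id" keys, using -1 to mark
# unknown or inconsistent values.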
173 def check_valid(new):
175 Check if section and priority for NEW packages exist in database.
176 Additionally performs sanity checks:
177 - debian-installer packages have to be udeb (or source)
178 - non-debian-installer packages cannot be udeb
179 - source priority can only be assigned to dsc file types
182 @param new: Dict of new packages with their section, priority and type.
185 for pkg in new.keys():
186 section_name = new[pkg]["section"]
187 priority_name = new[pkg]["priority"]
188 file_type = new[pkg]["type"]
190 section = get_section(section_name)
192 new[pkg]["section id"] = -1
194 new[pkg]["section id"] = section.section_id
196 priority = get_priority(priority_name)
198 new[pkg]["priority id"] = -1
200 new[pkg]["priority id"] = priority.priority_id
203 di = section_name.find("debian-installer") != -1
205 # If d-i, we must be udeb and vice-versa
206 if (di and file_type not in ("udeb", "dsc")) or \
207 (not di and file_type == "udeb"):
208 new[pkg]["section id"] = -1
210 # If dsc we need to be source and vice-versa
211 if (priority_name == "source" and file_type != "dsc") or \
212 (priority_name != "source" and file_type == "dsc"):
213 new[pkg]["priority id"] = -1
215 ###############################################################################
217 def check_status(files):
219 for f in files.keys():
220 if files[f]["type"] == "byhand":
222 elif files[f].has_key("new"):
226 ###############################################################################
228 # Used by Upload.check_timestamps
229 class TarTime(object):
230 def __init__(self, future_cutoff, past_cutoff):
232 self.future_cutoff = future_cutoff
233 self.past_cutoff = past_cutoff
236 self.future_files = {}
237 self.ancient_files = {}
239 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
240 if MTime > self.future_cutoff:
241 self.future_files[Name] = MTime
242 if MTime < self.past_cutoff:
243 self.ancient_files[Name] = MTime
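# Illustrative sketch of how TarTime is driven (this mirrors what
# Upload.check_timestamps() does further down); the cutoff values are made up:
#
#   tar = TarTime(future_cutoff=time.time() + 86400, past_cutoff=0)
#   deb_file = utils.open_file("foo_1.0-1_amd64.deb")
#   apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
#   print tar.future_files, tar.ancient_files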
245 ###############################################################################
247 class Upload(object):
249 Everything that has to do with processing an upload.
257 ###########################################################################
260 """ Reset a number of internal variables."""
262 # Initialize the substitution template map
265 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
266 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
267 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
268 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
276 def package_info(self):
278 Format various messages from this Upload to send to the maintainer.
282 ('Reject Reasons', self.rejects),
283 ('Warnings', self.warnings),
284 ('Notes', self.notes),
288 for title, messages in msgs:
290 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
295 ###########################################################################
296 def update_subst(self):
297 """ Set up the per-package template substitution mappings """
301 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
302 if not self.pkg.changes.has_key("architecture") or not \
303 isinstance(self.pkg.changes["architecture"], dict):
304 self.pkg.changes["architecture"] = { "Unknown" : "" }
306 # and maintainer2047 may not exist.
307 if not self.pkg.changes.has_key("maintainer2047"):
308 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
310 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
311 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
312 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
314 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
315 if self.pkg.changes["architecture"].has_key("source") and \
316 self.pkg.changes["changedby822"] != "" and \
317 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
319 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
320 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
321 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
323 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
324 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
325 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
327 if "sponsoremail" in self.pkg.changes:
328 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
330 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
331 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
333 # Apply any global override of the Maintainer field
334 if cnf.get("Dinstall::OverrideMaintainer"):
335 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
336 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
338 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
339 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
340 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
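# The Subst map built above is consumed via utils.TemplateSubst() whenever a
# mail is generated (see close_bugs() and announce() below); a minimal sketch:
#
#   self.update_subst()
#   mail_message = utils.TemplateSubst(self.Subst, template)  # template: path to a Dir::Templates file
#   utils.send_mail(mail_message)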
342 ###########################################################################
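# Minimal sketch of the expected call pattern for load_changes(); the path is
# hypothetical:
#
#   u = Upload()
#   if not u.load_changes("/srv/queue/unchecked/foo_1.0-1_amd64.changes"):
#       # Fatal parse problem; u.rejects explains why.
#       for r in u.rejects:
#           print r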
343 def load_changes(self, filename):
346 @return: whether the changes file was valid or not. We may want to
347 reject even if this is True (see what gets put in self.rejects).
348 This is simply to prevent us even trying things later which will
349 fail because we couldn't properly parse the file.
352 self.pkg.changes_file = filename
354 # Parse the .changes file into a dictionary
356 self.pkg.changes.update(parse_changes(filename))
357 except CantOpenError:
358 self.rejects.append("%s: can't read file." % (filename))
360 except ParseChangesError, line:
361 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
363 except ChangesUnicodeError:
364 self.rejects.append("%s: changes file not proper utf-8" % (filename))
367 # Parse the Files field from the .changes into another dictionary
369 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
370 except ParseChangesError, line:
371 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
373 except UnknownFormatError, format:
374 self.rejects.append("%s: unknown format '%s'." % (filename, format))
377 # Check for mandatory fields
378 for i in ("distribution", "source", "binary", "architecture",
379 "version", "maintainer", "files", "changes", "description"):
380 if not self.pkg.changes.has_key(i):
381 # Avoid undefined errors later
382 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
385 # Strip a source version in brackets from the source field
386 if re_strip_srcver.search(self.pkg.changes["source"]):
387 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
389 # Ensure the source field is a valid package name.
390 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
391 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
393 # Split multi-value fields into a lower-level dictionary
394 for i in ("architecture", "distribution", "binary", "closes"):
395 o = self.pkg.changes.get(i, "")
397 del self.pkg.changes[i]
399 self.pkg.changes[i] = {}
402 self.pkg.changes[i][j] = 1
404 # Fix the Maintainer: field to be RFC822/2047 compatible
406 (self.pkg.changes["maintainer822"],
407 self.pkg.changes["maintainer2047"],
408 self.pkg.changes["maintainername"],
409 self.pkg.changes["maintaineremail"]) = \
410 fix_maintainer (self.pkg.changes["maintainer"])
411 except ParseMaintError, msg:
412 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
413 % (filename, self.pkg.changes["maintainer"], msg))
415 # ...likewise for the Changed-By: field if it exists.
417 (self.pkg.changes["changedby822"],
418 self.pkg.changes["changedby2047"],
419 self.pkg.changes["changedbyname"],
420 self.pkg.changes["changedbyemail"]) = \
421 fix_maintainer (self.pkg.changes.get("changed-by", ""))
422 except ParseMaintError, msg:
423 self.pkg.changes["changedby822"] = ""
424 self.pkg.changes["changedby2047"] = ""
425 self.pkg.changes["changedbyname"] = ""
426 self.pkg.changes["changedbyemail"] = ""
428 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
429 % (filename, self.pkg.changes.get("changed-by", ""), msg))
431 # Ensure all the values in Closes: are numbers
432 if self.pkg.changes.has_key("closes"):
433 for i in self.pkg.changes["closes"].keys():
434 if re_isanum.match(i) is None:
435 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
437 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
438 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
439 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
441 # Check there isn't already a changes file of the same name in one
442 # of the queue directories.
443 base_filename = os.path.basename(filename)
444 if get_knownchange(base_filename):
445 self.rejects.append("%s: a file with this name already exists." % (base_filename))
447 # Check the .changes is non-empty
448 if not self.pkg.files:
449 self.rejects.append("%s: nothing to do (Files field is empty)." % (base_filename))
452 # Changes was syntactically valid even if we'll reject
455 ###########################################################################
457 def check_distributions(self):
458 "Check and map the Distribution field"
462 # Handle suite mappings
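# SuiteMappings entries are whitespace-separated strings whose first word is the
# mapping type handled below; the examples are purely illustrative:
#
#   "map stable proposed-updates"
#   "silent-map testing-security testing-proposed-updates"
#   "map-unreleased unstable experimental"
#   "ignore oldstable"
#   "reject testing"
#   "propup-version stable-security testing testing-proposed-updates"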
463 for m in Cnf.ValueList("SuiteMappings"):
466 if mtype == "map" or mtype == "silent-map":
467 (source, dest) = args[1:3]
468 if self.pkg.changes["distribution"].has_key(source):
469 del self.pkg.changes["distribution"][source]
470 self.pkg.changes["distribution"][dest] = 1
471 if mtype != "silent-map":
472 self.notes.append("Mapping %s to %s." % (source, dest))
473 if self.pkg.changes.has_key("distribution-version"):
474 if self.pkg.changes["distribution-version"].has_key(source):
475 self.pkg.changes["distribution-version"][source]=dest
476 elif mtype == "map-unreleased":
477 (source, dest) = args[1:3]
478 if self.pkg.changes["distribution"].has_key(source):
479 for arch in self.pkg.changes["architecture"].keys():
480 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
481 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
482 del self.pkg.changes["distribution"][source]
483 self.pkg.changes["distribution"][dest] = 1
485 elif mtype == "ignore":
487 if self.pkg.changes["distribution"].has_key(suite):
488 del self.pkg.changes["distribution"][suite]
489 self.warnings.append("Ignoring %s as a target suite." % (suite))
490 elif mtype == "reject":
492 if self.pkg.changes["distribution"].has_key(suite):
493 self.rejects.append("Uploads to %s are not accepted." % (suite))
494 elif mtype == "propup-version":
495 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
497 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
498 if self.pkg.changes["distribution"].has_key(args[1]):
499 self.pkg.changes.setdefault("distribution-version", {})
500 for suite in args[2:]:
501 self.pkg.changes["distribution-version"][suite] = suite
503 # Ensure there is (still) a target distribution
504 if len(self.pkg.changes["distribution"].keys()) < 1:
505 self.rejects.append("No valid distribution remaining.")
507 # Ensure target distributions exist
508 for suite in self.pkg.changes["distribution"].keys():
509 if not Cnf.has_key("Suite::%s" % (suite)):
510 self.rejects.append("Unknown distribution `%s'." % (suite))
512 ###########################################################################
514 def binary_file_checks(self, f, session):
516 entry = self.pkg.files[f]
518 # Extract package control information
519 deb_file = utils.open_file(f)
521 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
523 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
525 # Can't continue, none of the checks on control would work.
528 # Check for mandatory "Description:"
531 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
533 self.rejects.append("%s: Missing Description in binary package" % (f))
538 # Check for mandatory fields
539 for field in [ "Package", "Architecture", "Version" ]:
540 if control.Find(field) == None:
542 self.rejects.append("%s: No %s field in control." % (f, field))
545 # Ensure the package name matches the one given in the .changes
546 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
547 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
549 # Validate the package field
550 package = control.Find("Package")
551 if not re_valid_pkg_name.match(package):
552 self.rejects.append("%s: invalid package name '%s'." % (f, package))
554 # Validate the version field
555 version = control.Find("Version")
556 if not re_valid_version.match(version):
557 self.rejects.append("%s: invalid version number '%s'." % (f, version))
559 # Ensure the architecture of the .deb is one we know about.
560 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
561 architecture = control.Find("Architecture")
562 upload_suite = self.pkg.changes["distribution"].keys()[0]
564 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session)] \
565 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session)]:
566 self.rejects.append("Unknown architecture '%s'." % (architecture))
568 # Ensure the architecture of the .deb is one of the ones
569 # listed in the .changes.
570 if not self.pkg.changes["architecture"].has_key(architecture):
571 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
573 # Sanity-check the Depends field
574 depends = control.Find("Depends")
576 self.rejects.append("%s: Depends field is empty." % (f))
578 # Sanity-check the Provides field
579 provides = control.Find("Provides")
581 provide = re_spacestrip.sub('', provides)
583 self.rejects.append("%s: Provides field is empty." % (f))
584 prov_list = provide.split(",")
585 for prov in prov_list:
586 if not re_valid_pkg_name.match(prov):
587 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
589 # Check the section & priority match those given in the .changes (non-fatal)
590 if control.Find("Section") and entry["section"] != "" \
591 and entry["section"] != control.Find("Section"):
592 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
593 (f, control.Find("Section", ""), entry["section"]))
594 if control.Find("Priority") and entry["priority"] != "" \
595 and entry["priority"] != control.Find("Priority"):
596 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
597 (f, control.Find("Priority", ""), entry["priority"]))
599 entry["package"] = package
600 entry["architecture"] = architecture
601 entry["version"] = version
602 entry["maintainer"] = control.Find("Maintainer", "")
604 if f.endswith(".udeb"):
605 self.pkg.files[f]["dbtype"] = "udeb"
606 elif f.endswith(".deb"):
607 self.pkg.files[f]["dbtype"] = "deb"
609 self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
611 entry["source"] = control.Find("Source", entry["package"])
613 # Get the source version
614 source = entry["source"]
617 if source.find("(") != -1:
618 m = re_extract_src_version.match(source)
620 source_version = m.group(2)
622 if not source_version:
623 source_version = self.pkg.files[f]["version"]
625 entry["source package"] = source
626 entry["source version"] = source_version
628 # Ensure the filename matches the contents of the .deb
629 m = re_isadeb.match(f)
632 file_package = m.group(1)
633 if entry["package"] != file_package:
634 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
635 (f, file_package, entry["dbtype"], entry["package"]))
636 epochless_version = re_no_epoch.sub('', control.Find("Version"))
639 file_version = m.group(2)
640 if epochless_version != file_version:
641 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
642 (f, file_version, entry["dbtype"], epochless_version))
645 file_architecture = m.group(3)
646 if entry["architecture"] != file_architecture:
647 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
648 (f, file_architecture, entry["dbtype"], entry["architecture"]))
650 # Check that the corresponding source exists
651 source_version = entry["source version"]
652 source_package = entry["source package"]
653 if self.pkg.changes["architecture"].has_key("source"):
654 if source_version != self.pkg.changes["version"]:
655 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
656 (source_version, f, self.pkg.changes["version"]))
658 # Check in the SQL database
659 if not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
660 # Check in one of the other directories
661 source_epochless_version = re_no_epoch.sub('', source_version)
662 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
663 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
665 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
668 dsc_file_exists = False
669 for myq in ["Accepted", "Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
670 if cnf.has_key("Dir::Queue::%s" % (myq)):
671 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
672 dsc_file_exists = True
675 if not dsc_file_exists:
676 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
678 # Check the version and for file overwrites
679 self.check_binary_against_db(f, session)
681 # Temporarily disable contents generation until we change the table storage layout
684 #if len(b.rejects) > 0:
685 # for j in b.rejects:
686 # self.rejects.append(j)
688 def source_file_checks(self, f, session):
689 entry = self.pkg.files[f]
691 m = re_issource.match(f)
695 entry["package"] = m.group(1)
696 entry["version"] = m.group(2)
697 entry["type"] = m.group(3)
699 # Ensure the source package name matches the Source field in the .changes
700 if self.pkg.changes["source"] != entry["package"]:
701 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
703 # Ensure the source version matches the version in the .changes file
704 if re_is_orig_source.match(f):
705 changes_version = self.pkg.changes["chopversion2"]
707 changes_version = self.pkg.changes["chopversion"]
709 if changes_version != entry["version"]:
710 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
712 # Ensure the .changes lists source in the Architecture field
713 if not self.pkg.changes["architecture"].has_key("source"):
714 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
716 # Check the signature of a .dsc file
717 if entry["type"] == "dsc":
718 # check_signature returns either:
719 # (None, [list, of, rejects]) or (signature, [])
720 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
722 self.rejects.append(j)
724 entry["architecture"] = "source"
726 def per_suite_file_checks(self, f, suite, session):
728 entry = self.pkg.files[f]
729 archive = utils.where_am_i()
732 if entry.has_key("byhand"):
735 # Check we have fields we need to do these checks
737 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
738 if not entry.has_key(m):
739 self.rejects.append("file '%s' does not have field %s set" % (f, m))
745 # Handle component mappings
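# ComponentMappings entries are simple "source dest" pairs, e.g. (hypothetical
# values):
#
#   "non-US/main main"
#   "non-US/contrib contrib"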
746 for m in cnf.ValueList("ComponentMappings"):
747 (source, dest) = m.split()
748 if entry["component"] == source:
749 entry["original component"] = source
750 entry["component"] = dest
752 # Ensure the component is valid for the target suite
753 if cnf.has_key("Suite::%s::Components" % (suite)) and \
754 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
755 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
758 # Validate the component
759 if not get_component(entry["component"], session):
760 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
763 # See if the package is NEW
764 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
767 # Validate the priority
768 if entry["priority"].find('/') != -1:
769 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
771 # Determine the location
772 location = cnf["Dir::Pool"]
773 l = get_location(location, entry["component"], archive, session)
775 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
776 entry["location id"] = -1
778 entry["location id"] = l.location_id
780 # Check the md5sum & size against existing files (if any)
781 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
783 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
784 entry["size"], entry["md5sum"], entry["location id"])
787 self.rejects.append("INTERNAL ERROR, check_poolfile() returned multiple matches for %s." % (f))
788 elif found is False and poolfile is not None:
789 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
792 entry["files id"] = None
794 entry["files id"] = poolfile.file_id
796 # Check for packages that have moved from one component to another
797 entry['suite'] = suite
798 res = get_binary_components(self.pkg.files[f]['package'], suite, entry["architecture"], session)
800 entry["othercomponents"] = res.fetchone()[0]
802 def check_files(self, action=True):
803 file_keys = self.pkg.files.keys()
809 os.chdir(self.pkg.directory)
811 ret = holding.copy_to_holding(f)
813 # XXX: Should we bail out here or try and continue?
814 self.rejects.append(ret)
818 # Check whether we already know this .changes file
819 # [NB: this check must be done post-suite mapping]
820 base_filename = os.path.basename(self.pkg.changes_file)
822 session = DBConn().session()
825 changes = session.query(KnownChange).filter_by(changesname=base_filename).one()
826 if not changes.approved_for:
827 self.rejects.append("%s file already known to dak" % base_filename)
828 except NoResultFound, e:
835 for f, entry in self.pkg.files.items():
836 # Ensure the file does not already exist in one of the accepted directories
837 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
838 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
839 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
840 self.rejects.append("%s file already exists in the %s directory." % (f, d))
842 if not re_taint_free.match(f):
843 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
845 # Check the file is readable
846 if os.access(f, os.R_OK) == 0:
847 # When running in -n, copy_to_holding() won't have
848 # generated the reject_message, so we need to.
850 if os.path.exists(f):
851 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
853 self.rejects.append("Can't read `%s'. [file not found]" % (f))
854 entry["type"] = "unreadable"
857 # If it's byhand skip remaining checks
858 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
860 entry["type"] = "byhand"
862 # Checks for a binary package...
863 elif re_isadeb.match(f):
865 entry["type"] = "deb"
867 # This routine appends to self.rejects/warnings as appropriate
868 self.binary_file_checks(f, session)
870 # Checks for a source package...
871 elif re_issource.match(f):
874 # This routine appends to self.rejects/warnings as appropriate
875 self.source_file_checks(f, session)
877 # Not a binary or source package? Assume byhand...
880 entry["type"] = "byhand"
882 # Per-suite file checks
883 entry["oldfiles"] = {}
884 for suite in self.pkg.changes["distribution"].keys():
885 self.per_suite_file_checks(f, suite, session)
889 # If the .changes file says it has source, it must have source.
890 if self.pkg.changes["architecture"].has_key("source"):
892 self.rejects.append("no source found, but the Architecture line in the changes file mentions source.")
894 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
895 self.rejects.append("source only uploads are not supported.")
897 ###########################################################################
898 def check_dsc(self, action=True, session=None):
899 """Returns bool indicating whether or not the source changes are valid"""
900 # Ensure there is source to check
901 if not self.pkg.changes["architecture"].has_key("source"):
906 for f, entry in self.pkg.files.items():
907 if entry["type"] == "dsc":
909 self.rejects.append("can not process a .changes file with multiple .dsc's.")
914 # If there isn't one, reject: source uploads must contain a .dsc
916 self.rejects.append("source uploads must contain a dsc file")
919 # Parse the .dsc file
921 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
922 except CantOpenError:
923 # if not -n copy_to_holding() will have done this for us...
925 self.rejects.append("%s: can't read file." % (dsc_filename))
926 except ParseChangesError, line:
927 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
928 except InvalidDscError, line:
929 self.rejects.append("%s: syntax error on line %s." % (dsc_filename, line))
930 except ChangesUnicodeError:
931 self.rejects.append("%s: dsc file not proper utf-8." % (dsc_filename))
933 # Build up the file list of files mentioned by the .dsc
935 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
936 except NoFilesFieldError:
937 self.rejects.append("%s: no Files: field." % (dsc_filename))
939 except UnknownFormatError, format:
940 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
942 except ParseChangesError, line:
943 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
946 # Enforce mandatory fields
947 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
948 if not self.pkg.dsc.has_key(i):
949 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
952 # Validate the source and version fields
953 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
954 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
955 if not re_valid_version.match(self.pkg.dsc["version"]):
956 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
958 # Only a limited list of source formats are allowed in each suite
959 for dist in self.pkg.changes["distribution"].keys():
960 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
961 if self.pkg.dsc["format"] not in allowed:
962 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
964 # Validate the Maintainer field
966 # We ignore the return value
967 fix_maintainer(self.pkg.dsc["maintainer"])
968 except ParseMaintError, msg:
969 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
970 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
972 # Validate the build-depends field(s)
973 for field_name in [ "build-depends", "build-depends-indep" ]:
974 field = self.pkg.dsc.get(field_name)
976 # Have apt try to parse them...
978 apt_pkg.ParseSrcDepends(field)
980 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
982 # Ensure the version number in the .dsc matches the version number in the .changes
983 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
984 changes_version = self.pkg.files[dsc_filename]["version"]
986 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
987 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
989 # Ensure the Files field contains only what's expected
990 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
992 # Ensure source is newer than existing source in target suites
993 session = DBConn().session()
994 self.check_source_against_db(dsc_filename, session)
995 self.check_dsc_against_db(dsc_filename, session)
1000 ###########################################################################
1002 def get_changelog_versions(self, source_dir):
1003 """Extracts the source package and (optionally) grabs the
1004 version history out of debian/changelog for the BTS."""
1008 # Find the .dsc (again)
1010 for f in self.pkg.files.keys():
1011 if self.pkg.files[f]["type"] == "dsc":
1014 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1015 if not dsc_filename:
1018 # Create a symlink mirror of the source files in our temporary directory
1019 for f in self.pkg.files.keys():
1020 m = re_issource.match(f)
1022 src = os.path.join(source_dir, f)
1023 # If a file is missing for whatever reason, give up.
1024 if not os.path.exists(src):
1027 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1028 self.pkg.orig_files[f].has_key("path"):
1030 dest = os.path.join(os.getcwd(), f)
1031 os.symlink(src, dest)
1033 # If the orig files are not a part of the upload, create symlinks to the
1035 for orig_file in self.pkg.orig_files.keys():
1036 if not self.pkg.orig_files[orig_file].has_key("path"):
1038 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1039 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1041 # Extract the source
1042 cmd = "dpkg-source -sn -x %s" % (dsc_filename)
1043 (result, output) = commands.getstatusoutput(cmd)
1045 self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
1046 self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
1049 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1052 # Get the upstream version
1053 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1054 if re_strip_revision.search(upstr_version):
1055 upstr_version = re_strip_revision.sub('', upstr_version)
1057 # Ensure the changelog file exists
1058 changelog_filename = "%s-%s/debian/changelog" % (self.pkg.dsc["source"], upstr_version)
1059 if not os.path.exists(changelog_filename):
1060 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1063 # Parse the changelog
1064 self.pkg.dsc["bts changelog"] = ""
1065 changelog_file = utils.open_file(changelog_filename)
1066 for line in changelog_file.readlines():
1067 m = re_changelog_versions.match(line)
1069 self.pkg.dsc["bts changelog"] += line
1070 changelog_file.close()
1072 # Check we found at least one revision in the changelog
1073 if not self.pkg.dsc["bts changelog"]:
1074 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1076 def check_source(self):
1078 # a) there's no source
1079 # or b) the orig files are MIA
1080 if not self.pkg.changes["architecture"].has_key("source") \
1081 or len(self.pkg.orig_files) == 0:
1084 tmpdir = utils.temp_dirname()
1086 # Move into the temporary directory
1090 # Get the changelog version history
1091 self.get_changelog_versions(cwd)
1093 # Move back and cleanup the temporary tree
1097 shutil.rmtree(tmpdir)
1099 if e.errno != errno.EACCES:
1101 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1103 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1104 # We probably have u-r (user-unreadable) or u-w (user-unwritable) directories, so chmod everything
1106 cmd = "chmod -R u+rwx %s" % (tmpdir)
1107 result = os.system(cmd)
1109 utils.fubar("'%s' failed with result %s." % (cmd, result))
1110 shutil.rmtree(tmpdir)
1111 except Exception, e:
1112 print "foobar2 (%s)" % e
1113 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1115 ###########################################################################
1116 def ensure_hashes(self):
1117 # Make sure we recognise the format of the Files: field in the .changes
1118 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1119 if len(format) == 2:
1120 format = int(format[0]), int(format[1])
1122 format = int(float(format[0])), 0
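# Worked example of the parsing above: a Format of "1.8" becomes (1, 8),
# "1.6" becomes (1, 6), and a bare "1" falls through to the else branch as (1, 0).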
1124 # We need to deal with the original changes blob, as the fields we need
1125 # might not be in the changes dict serialised into the .dak anymore.
1126 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1128 # Copy the checksums over to the current changes dict. This will keep
1129 # the existing modifications to it intact.
1130 for field in orig_changes:
1131 if field.startswith('checksums-'):
1132 self.pkg.changes[field] = orig_changes[field]
1134 # Check for unsupported hashes
1135 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1136 self.rejects.append(j)
1138 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1139 self.rejects.append(j)
1141 # We have to calculate the hash ourselves if the changes format predates the
1142 # version the hash field first appeared in, rather than requiring it to exist in the changes file
1143 for hashname, hashfunc, version in utils.known_hashes:
1144 # TODO: Move _ensure_changes_hash into this class
1145 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1146 self.rejects.append(j)
1147 if "source" in self.pkg.changes["architecture"]:
1148 # TODO: Move _ensure_dsc_hash into this class
1149 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1150 self.rejects.append(j)
1152 def check_hashes(self):
1153 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1154 self.rejects.append(m)
1156 for m in utils.check_size(".changes", self.pkg.files):
1157 self.rejects.append(m)
1159 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1160 self.rejects.append(m)
1162 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1163 self.rejects.append(m)
1165 self.ensure_hashes()
1167 ###########################################################################
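# Sketch of the intended use of ensure_orig() (check_lintian() below does
# exactly this): symlink any missing orig tarballs into place, run the tool
# that needs them, then remove the symlinks again.
#
#   symlinked = self.ensure_orig(target_dir='.')
#   ...                      # run lintian / dpkg-source / whatever needs them
#   for symlink in symlinked:
#       os.unlink(symlink)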
1169 def ensure_orig(self, target_dir='.', session=None):
1171 Ensures that all orig files mentioned in the changes file are present
1172 in target_dir. If they do not exist, they are symlinked into place.
1174 A list containing the symlinks that were created is returned (so they
1181 for filename, entry in self.pkg.dsc_files.iteritems():
1182 if not re_is_orig_source.match(filename):
1183 # File is not an orig; ignore
1186 if os.path.exists(filename):
1187 # File exists, no need to continue
1190 def symlink_if_valid(path):
1191 f = utils.open_file(path)
1192 md5sum = apt_pkg.md5sum(f)
1195 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1196 expected = (int(entry['size']), entry['md5sum'])
1198 if fingerprint != expected:
1201 dest = os.path.join(target_dir, filename)
1203 os.symlink(path, dest)
1204 symlinked.append(dest)
1210 session_ = DBConn().session()
1215 for poolfile in get_poolfile_like_name('/%s' % filename, session_):
1216 poolfile_path = os.path.join(
1217 poolfile.location.path, poolfile.filename
1220 if symlink_if_valid(poolfile_path):
1230 # Look in some other queues for the file
1231 queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
1232 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1234 for queue in queues:
1235 if not cnf.get('Dir::Queue::%s' % queue):
1238 queuefile_path = os.path.join(
1239 cnf['Dir::Queue::%s' % queue], filename
1242 if not os.path.exists(queuefile_path):
1243 # Does not exist in this queue
1246 if symlink_if_valid(queuefile_path):
1251 ###########################################################################
1253 def check_lintian(self):
1256 # Don't reject binary uploads
1257 if not self.pkg.changes['architecture'].has_key('source'):
1260 # Only check some distributions
1262 for dist in ('unstable', 'experimental'):
1263 if dist in self.pkg.changes['distribution']:
1270 tagfile = cnf.get("Dinstall::LintianTags")
1272 # We don't have a tagfile, so just don't do anything.
1275 # Parse the yaml file
1276 sourcefile = file(tagfile, 'r')
1277 sourcecontent = sourcefile.read()
1280 lintiantags = yaml.load(sourcecontent)['lintian']
1281 except yaml.YAMLError, msg:
1282 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
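# The tag file is expected to be YAML with a top-level "lintian" key grouping
# tags by severity; the tag names below are only illustrative:
#
#   lintian:
#     warning:
#       - binary-without-manpage
#     error:
#       - debian-changelog-file-missing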
1285 # Try and find all orig mentioned in the .dsc
1286 symlinked = self.ensure_orig()
1288 # Now set up the input file for lintian. lintian wants "one tag per line" only,
1289 # so put it together that way. We put all types of tags in one file and then sort
1290 # through lintian's output later to see whether a detected tag is fatal or not.
1291 # So we only run lintian once on all tags, even if we might reject on some, but not
1293 # Additionally build up a set of tags
1295 (fd, temp_filename) = utils.temp_filename()
1296 temptagfile = os.fdopen(fd, 'w')
1297 for tagtype in lintiantags:
1298 for tag in lintiantags[tagtype]:
1299 temptagfile.write("%s\n" % tag)
1303 # So now we should look at running lintian on the .changes file, capturing output
1305 command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
1306 (result, output) = commands.getstatusoutput(command)
1308 # We are done with lintian, remove our tempfile and any symlinks we created
1309 os.unlink(temp_filename)
1310 for symlink in symlinked:
1314 utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
1315 utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
1317 if len(output) == 0:
1322 self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
1324 # We have output from lintian, so this package isn't clean. Let's parse it and see if we
1325 # have a reason to reject it.
1326 # W: tzdata: binary-without-manpage usr/sbin/tzconfig
1327 for line in output.split('\n'):
1328 m = re_parse_lintian.match(line)
1333 epackage = m.group(2)
1337 # So let's check whether we know the tag at all.
1338 if etag not in tags:
1342 # We know it and it is overridden. Check whether the override is allowed.
1343 if etag in lintiantags['warning']:
1344 # The tag is overridden, and it is allowed to be overridden.
1345 # Don't add a reject message.
1347 elif etag in lintiantags['error']:
1348 # The tag is overridden - but is not allowed to be
1349 self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
1350 log("ftpmaster does not allow tag to be overridable", etag)
1352 # Tag is known, it is not overridden, direct reject.
1353 self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
1354 # Now tell them whether they *might* override it.
1355 if etag in lintiantags['warning']:
1356 log("auto rejecting", "overridable", etag)
1357 self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
1359 log("auto rejecting", "not overridable", etag)
1361 ###########################################################################
1362 def check_urgency(self):
1364 if self.pkg.changes["architecture"].has_key("source"):
1365 if not self.pkg.changes.has_key("urgency"):
1366 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1367 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1368 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1369 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1370 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1371 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1373 ###########################################################################
1375 # Sanity check the time stamps of files inside debs.
1376 # [Files in the near future cause ugly warnings and extreme time
1377 # travel can cause errors on extraction]
1379 def check_timestamps(self):
1382 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1383 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1384 tar = TarTime(future_cutoff, past_cutoff)
1386 for filename, entry in self.pkg.files.items():
1387 if entry["type"] == "deb":
1390 deb_file = utils.open_file(filename)
1391 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1394 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1395 except SystemError, e:
1396 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1397 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1400 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1404 future_files = tar.future_files.keys()
1406 num_future_files = len(future_files)
1407 future_file = future_files[0]
1408 future_date = tar.future_files[future_file]
1409 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1410 % (filename, num_future_files, future_file, time.ctime(future_date)))
1412 ancient_files = tar.ancient_files.keys()
1414 num_ancient_files = len(ancient_files)
1415 ancient_file = ancient_files[0]
1416 ancient_date = tar.ancient_files[ancient_file]
1417 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1418 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1420 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1422 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1423 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1425 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1431 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1432 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1433 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1434 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1435 self.pkg.changes["sponsoremail"] = uid_email
1440 ###########################################################################
1441 # check_signed_by_key checks
1442 ###########################################################################
1444 def check_signed_by_key(self):
1445 """Ensure the .changes is signed by an authorized uploader."""
1446 session = DBConn().session()
1448 # First of all we check that the person has proper upload permissions
1449 # and that this upload isn't blocked
1450 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1453 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1456 # TODO: Check that import-keyring adds UIDs properly
1458 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1461 # Check that the fingerprint which uploaded has permission to do so
1462 self.check_upload_permissions(fpr, session)
1464 # Check that this package is not in a transition
1465 self.check_transition(session)
1470 def check_upload_permissions(self, fpr, session):
1471 # Check any one-off upload blocks
1472 self.check_upload_blocks(fpr, session)
1474 # Start with DM as a special case
1475 # DM is a special case unfortunately, so we check it first
1476 # (keys with no source access get more access than DMs in one
1477 # way; DMs can only upload for their packages whether source
1478 # or binary, whereas keys with no access might be able to
1479 # upload some binaries)
1480 if fpr.source_acl.access_level == 'dm':
1481 self.check_dm_upload(fpr, session)
1483 # Check source-based permissions for other types
1484 if self.pkg.changes["architecture"].has_key("source"):
1485 if fpr.source_acl.access_level is None:
1486 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1487 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1488 self.rejects.append(rej)
1491 # If not a DM, we allow full upload rights
1492 uid_email = "%s@debian.org" % (fpr.uid.uid)
1493 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1496 # Check binary upload permissions
1497 # By this point we know that DMs can't have got here unless they
1498 # are allowed to deal with the package concerned so just apply
1500 if fpr.binary_acl.access_level == 'full':
1503 # Otherwise we're in the map case
1504 tmparches = self.pkg.changes["architecture"].copy()
1505 tmparches.pop('source', None)
1507 for bam in fpr.binary_acl_map:
1508 tmparches.pop(bam.architecture.arch_string, None)
1510 if len(tmparches.keys()) > 0:
1511 if fpr.binary_reject:
1512 rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1513 rej += "\narchitectures involved are: %s" % ",".join(tmparches.keys())
1514 self.rejects.append(rej)
1516 # TODO: This is where we'll implement reject vs throw away binaries later
1517 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1518 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1519 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1520 self.rejects.append(rej)
1523 def check_upload_blocks(self, fpr, session):
1524 """Check whether any upload blocks apply to this source, source
1525 version, uid / fpr combination"""
1527 def block_rej_template(fb):
1528 rej = 'Manual upload block in place for package %s' % fb.source
1529 if fb.version is not None:
1530 rej += ', version %s' % fb.version
1533 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1534 # version is None if the block applies to all versions
1535 if fb.version is None or fb.version == self.pkg.changes['version']:
1536 # Check both fpr and uid - either is enough to cause a reject
1537 if fb.fpr is not None:
1538 if fb.fpr.fingerprint == fpr.fingerprint:
1539 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1540 if fb.uid is not None:
1541 if fb.uid == fpr.uid:
1542 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1545 def check_dm_upload(self, fpr, session):
1546 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1547 ## none of the uploaded packages are NEW
1549 for f in self.pkg.files.keys():
1550 if self.pkg.files[f].has_key("byhand"):
1551 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1553 if self.pkg.files[f].has_key("new"):
1554 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1560 ## the most recent version of the package uploaded to unstable or
1561 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
1562 ## section of its control file
1563 q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
1564 q = q.join(SrcAssociation)
1565 q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
1566 q = q.order_by(desc('source.version')).limit(1)
1571 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1572 self.rejects.append(rej)
1576 if not r.dm_upload_allowed:
1577 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1578 self.rejects.append(rej)
1581 ## the Maintainer: field of the uploaded .changes file corresponds with
1582 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1584 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1585 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1587 ## the most recent version of the package uploaded to unstable or
1588 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1589 ## non-developer maintainers cannot NMU or hijack packages)
1591 # srcuploaders includes the maintainer
1593 for sup in r.srcuploaders:
1594 (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
1595 # Eww - I hope we never have two people with the same name in Debian
1596 if email == fpr.uid.uid or name == fpr.uid.name:
1601 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1604 ## none of the packages are being taken over from other source packages
1605 for b in self.pkg.changes["binary"].keys():
1606 for suite in self.pkg.changes["distribution"].keys():
1607 q = session.query(DBSource)
1608 q = q.join(DBBinary).filter_by(package=b)
1609 q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
1612 if s.source != self.pkg.changes["source"]:
1613 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1617 def check_transition(self, session):
1620 sourcepkg = self.pkg.changes["source"]
1622 # No sourceful upload -> no need to do anything else, direct return
1623 # We also only deal with unstable uploads, not experimental or those going to some
1624 # proposed-updates queue
1625 if "source" not in self.pkg.changes["architecture"] or \
1626 "unstable" not in self.pkg.changes["distribution"]:
1629 # Also only check if there is a file defined (and existent) with
1631 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1632 if transpath == "" or not os.path.exists(transpath):
1635 # Parse the yaml file
1636 sourcefile = file(transpath, 'r')
1637 sourcecontent = sourcefile.read()
1639 transitions = yaml.load(sourcecontent)
1640 except yaml.YAMLError, msg:
1641 # This shouldn't happen, there is a wrapper to edit the file which
1642 # checks it, but we would rather be safe than end up rejecting
1644 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1647 # Now look through all defined transitions
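# A transitions file is a YAML mapping of transition names to details; the loop
# relies on the "source", "reason", "rm" and "packages" keys seen below, plus a
# key holding the awaited version (shown here, speculatively, as "new"):
#
#   libfoo2:
#     source: libfoo
#     new: 2.0-1
#     rm: A. Release-Teamer
#     reason: "libfoo soname bump"
#     packages:
#       - bar
#       - baz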
1648 for trans in transitions:
1649 t = transitions[trans]
1650 source = t["source"]
1653 # Will be None if nothing is in testing.
1654 current = get_source_in_suite(source, "testing", session)
1655 if current is not None:
1656 compare = apt_pkg.VersionCompare(current.version, expected)
1658 if current is None or compare < 0:
1659 # This is still valid, the current version in testing is older than
1660 # the new version we wait for, or there is none in testing yet
1662 # Check if the source we look at is affected by this.
1663 if sourcepkg in t['packages']:
1664 # The source is affected, let's reject it.
1666 rejectmsg = "%s: part of the %s transition.\n\n" % (
1669 if current is not None:
1670 currentlymsg = "at version %s" % (current.version)
1672 currentlymsg = "not present in testing"
1674 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1676 rejectmsg += "\n".join(textwrap.wrap("""Your package
1677 is part of a testing transition designed to get %s migrated (it is
1678 currently %s, we need version %s). This transition is managed by the
1679 Release Team, and %s is the Release-Team member responsible for it.
1680 Please mail debian-release@lists.debian.org or contact %s directly if you
1681 need further assistance. You might want to upload to experimental until this
1682 transition is done."""
1683 % (source, currentlymsg, expected,t["rm"], t["rm"])))
1685 self.rejects.append(rejectmsg)
1688 ###########################################################################
1689 # End check_signed_by_key checks
1690 ###########################################################################
1692 def build_summaries(self):
1693 """ Build a summary of changes the upload introduces. """
1695 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1697 short_summary = summary
1699 # This is for direport's benefit...
1700 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1703 summary += "Changes: " + f
1705 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1707 summary += self.announce(short_summary, 0)
1709 return (summary, short_summary)
1711 ###########################################################################
1713 def close_bugs(self, summary, action):
1715 Send mail to close bugs as instructed by the closes field in the changes file.
1716 Also add a line to summary if any work was done.
1718 @type summary: string
1719 @param summary: summary text, as given by L{build_summaries}
1722 @param action: if set to false, no real action will be taken.
1725 @return: summary. If action was taken, extended by the list of closed bugs.
1729 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1731 bugs = self.pkg.changes["closes"].keys()
1733 if len(bugs) == 0:
1734 return summary
1736 bugs.sort()
1737 summary += "Closing bugs: "
1738 for bug in bugs:
1739 summary += "%s " % (bug)
1741 if action:
1742 self.Subst["__BUG_NUMBER__"] = bug
1743 if self.pkg.changes["distribution"].has_key("stable"):
1744 self.Subst["__STABLE_WARNING__"] = """
1745 Note that this package is not part of the released stable Debian
1746 distribution. It may have dependencies on other unreleased software,
1747 or other instabilities. Please take care if you wish to install it.
1748 The update will eventually make its way into the next released Debian
1749 distribution."""
1750 else:
1751 self.Subst["__STABLE_WARNING__"] = ""
1752 mail_message = utils.TemplateSubst(self.Subst, template)
1753 utils.send_mail(mail_message)
1755 # Clear up after ourselves
1756 del self.Subst["__BUG_NUMBER__"]
1757 del self.Subst["__STABLE_WARNING__"]
1759 if action and self.logger:
1760 self.logger.log(["closing bugs"] + bugs)
1762 summary += "\n"
1764 return summary
1766 ###########################################################################
1768 def announce(self, short_summary, action):
1770 Send an announce mail about a new upload.
1772 @type short_summary: string
1773 @param short_summary: Short summary text to include in the mail
1776 @param action: if set to false, no real action will be taken.
1779 @return: text string describing the action taken.
1784 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
1786 # Only do announcements for source uploads with a recent dpkg-dev installed
1787 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
1788 self.pkg.changes["architecture"].has_key("source"):
1789 return ""
1791 lists_done = {}
1792 summary = ""
1794 self.Subst["__SHORT_SUMMARY__"] = short_summary
1796 for dist in self.pkg.changes["distribution"].keys():
1797 announce_list = cnf.Find("Suite::%s::Announce" % (dist))
1798 if announce_list == "" or lists_done.has_key(announce_list):
1799 continue
1801 lists_done[announce_list] = 1
1802 summary += "Announcing to %s\n" % (announce_list)
1806 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
1807 if cnf.get("Dinstall::TrackingServer") and \
1808 self.pkg.changes["architecture"].has_key("source"):
1809 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
1810 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
1812 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
1813 utils.send_mail(mail_message)
1815 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
1817 if cnf.FindB("Dinstall::CloseBugs"):
1818 summary = self.close_bugs(summary, action)
1820 del self.Subst["__SHORT_SUMMARY__"]
1822 return summary
1824 ###########################################################################
1826 def accept (self, summary, short_summary, session=None):
1830 This moves all files referenced from the .changes into the pool,
1831 sends the accepted mail, announces to lists, closes bugs and
1832 also checks for override disparities. If enabled it will write out
1833 the version history for the BTS Version Tracking and will finally call
1834 L{queue_build}.
1836 @type summary: string
1837 @param summary: Summary text
1839 @type short_summary: string
1840 @param short_summary: Short summary
1844 stats = SummaryStats()
1847 self.logger.log(["installing changes", self.pkg.changes_file])
1849 # Add the .dsc file to the DB first
1850 for newfile, entry in self.pkg.files.items():
1851 if entry["type"] == "dsc":
1852 dsc_component, dsc_location_id = add_dsc_to_db(self, newfile, session)
1854 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
1855 for newfile, entry in self.pkg.files.items():
1856 if entry["type"] == "deb":
1857 add_deb_to_db(self, newfile, session)
1859 # If this is a sourceful diff only upload that is moving
1860 # cross-component we need to copy the .orig files into the new
1861 # component too for the same reasons as above.
1862 if self.pkg.changes["architecture"].has_key("source"):
1863 for orig_file in self.pkg.orig_files.keys():
1864 if not self.pkg.orig_files[orig_file].has_key("id"):
1865 continue # Skip if it's not in the pool
1866 orig_file_id = self.pkg.orig_files[orig_file]["id"]
1867 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
1868 continue # Skip if the location didn't change
1871 oldf = get_poolfile_by_id(orig_file_id, session)
1872 old_filename = os.path.join(oldf.location.path, oldf.filename)
1873 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
1874 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
1876 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
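# utils.poolify() gives the component/prefix/source part of the pool layout
# (the prefix being the first letter of the source name, or the first four
# characters for "lib*" packages), which is later joined with Dir::Pool;
# roughly "main/b/bash/" for bash or "main/libf/libfoo/" for libfoo.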
1878 # TODO: Care about size/md5sum collisions etc
1879 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
1881 if newf is None:
1882 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
1883 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
1885 # TODO: Check that there's only 1 here
1886 source = get_sources_from_name(self.pkg.changes["source"], self.pkg.changes["version"])[0]
1887 dscf = get_dscfiles(source_id=source.source_id, poolfile_id=orig_file_id, session=session)[0]
1888 dscf.poolfile_id = newf.file_id
1892 # Install the files into the pool
1893 for newfile, entry in self.pkg.files.items():
1894 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
1895 utils.move(newfile, destination)
1896 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
1897 stats.accept_bytes += float(entry["size"])
1899 # Copy the .changes file across for suites which need it.
1900 copy_changes = {}
1901 for suite_name in self.pkg.changes["distribution"].keys():
1902 if cnf.has_key("Suite::%s::CopyChanges" % (suite_name)):
1903 copy_changes[cnf["Suite::%s::CopyChanges" % (suite_name)]] = ""
1905 for dest in copy_changes.keys():
1906 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
1908 # We're done - commit the database changes
1909 session.commit()
1910 # Our SQL session will automatically start a new transaction after
1911 # the last commit
1913 # Move the .changes into the 'done' directory
1914 utils.move(self.pkg.changes_file,
1915 os.path.join(cnf["Dir::Queue::Done"], os.path.basename(self.pkg.changes_file)))
1917 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
1918 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
1920 # Send accept mail, announce to lists, close bugs and check for
1921 # override disparities
1922 if not cnf["Dinstall::Options::No-Mail"]:
1924 self.Subst["__SUITE__"] = ""
1925 self.Subst["__SUMMARY__"] = summary
1926 mail_message = utils.TemplateSubst(self.Subst,
1927 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
1928 utils.send_mail(mail_message)
1929 self.announce(short_summary, 1)
1931 ## Helper stuff for DebBugs Version Tracking
1932 if cnf.Find("Dir::Queue::BTSVersionTrack"):
1933 # ??? once queue/* is cleared on *.d.o and/or reprocessed
1934 # the conditionalization on dsc["bts changelog"] should be
1935 # removed.
1937 # Write out the version history from the changelog
1938 if self.pkg.changes["architecture"].has_key("source") and \
1939 self.pkg.dsc.has_key("bts changelog"):
1941 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1942 version_history = os.fdopen(fd, 'w')
1943 version_history.write(self.pkg.dsc["bts changelog"])
1944 version_history.close()
1945 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1946 self.pkg.changes_file[:-8]+".versions")
1947 os.rename(temp_filename, filename)
1948 os.chmod(filename, 0644)
1950 # Write out the binary -> source mapping.
1951 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
1952 debinfo = os.fdopen(fd, 'w')
1953 for name, entry in sorted(self.pkg.files.items()):
1954 if entry["type"] == "deb":
1955 line = " ".join([entry["package"], entry["version"],
1956 entry["architecture"], entry["source package"],
1957 entry["source version"]])
1958 debinfo.write(line+"\n")
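# Each line written above is "package version architecture source-package
# source-version", e.g. (with hypothetical values):
#   dash 0.5.5.1-1 amd64 dash 0.5.5.1-1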
1960 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
1961 self.pkg.changes_file[:-8]+".debinfo")
1962 os.rename(temp_filename, filename)
1963 os.chmod(filename, 0644)
1965 # This routine returns None on success or an error on failure
1966 # TODO: Replace queue copying using the new queue.add_file_from_pool routine
1967 # and by looking up which queues in suite.copy_queues
1968 #res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
1975 stats.accept_count += 1
1977 def check_override(self):
1979 Checks override entries for validity. Mails "Override disparity" warnings,
1980 if that feature is enabled.
1982 Abandons the check if
1983 - override disparity checks are disabled
1984 - mail sending is disabled
1989 # Abandon the check if:
1990 # a) override disparity checks have been disabled
1991 # b) we're not sending mail
1992 if not cnf.FindB("Dinstall::OverrideDisparityCheck") or \
1993 cnf["Dinstall::Options::No-Mail"]:
1996 summary = self.pkg.check_override()
2001 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2004 self.Subst["__SUMMARY__"] = summary
2005 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2006 utils.send_mail(mail_message)
2007 del self.Subst["__SUMMARY__"]
2009 ###########################################################################
2011 def remove(self, from_dir=None):
2013 Used (for instance) in p-u to remove the package from unchecked
2015 Also removes the package from holding area.
2017 if from_dir is None:
2018 from_dir = self.pkg.directory
2021 for f in self.pkg.files.keys():
2022 os.unlink(os.path.join(from_dir, f))
2023 if os.path.exists(os.path.join(h.holding_dir, f)):
2024 os.unlink(os.path.join(h.holding_dir, f))
2026 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2027 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2028 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2030 ###########################################################################
2032 def move_to_dir (self, dest, perms=0660, changesperms=0664):
2034 Move files to dest with certain perms/changesperms
2037 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2038 dest, perms=changesperms)
2039 for f in self.pkg.files.keys():
2040 utils.move(os.path.join(h.holding_dir, f), dest, perms=perms)
2042 ###########################################################################
2044 def force_reject(self, reject_files):
2046 Forcefully move files from the current directory to the
2047 reject directory. If any file already exists in the reject
2048 directory it will be moved to the morgue to make way for
2049 the new file.
2052 @param reject_files: file dictionary
2058 for file_entry in reject_files:
2059 # Skip any files which don't exist or which we don't have permission to copy.
2060 if os.access(file_entry, os.R_OK) == 0:
2061 continue
2063 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2065 try:
2066 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2067 except OSError, e:
2068 # File exists? Let's try and move it to the morgue
2069 if e.errno == errno.EEXIST:
2070 morgue_file = os.path.join(cnf["Dir::Morgue"], cnf["Dir::MorgueReject"], file_entry)
2071 try:
2072 morgue_file = utils.find_next_free(morgue_file)
2073 except NoFreeFilenameError:
2074 # Something's either gone badly Pete Tong, or
2075 # someone is trying to exploit us.
2076 utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
2077 return
2078 utils.move(dest_file, morgue_file, perms=0660)
2079 try:
2080 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2081 except OSError, e:
2083 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2084 return
2087 # If we got here, we own the destination file, so we can
2088 # safely overwrite it.
2089 utils.move(file_entry, dest_file, 1, perms=0660)
2092 ###########################################################################
2093 def do_reject (self, manual=0, reject_message="", note=""):
2095 Reject an upload. If called without a reject message or C{manual} is
2096 true, spawn an editor so the user can write one.
2099 @param manual: manual or automated rejection
2101 @type reject_message: string
2102 @param reject_message: A reject message
2107 # If we weren't given a manual rejection message, spawn an
2108 # editor so the user can add one in...
2109 if manual and not reject_message:
2110 (fd, temp_filename) = utils.temp_filename()
2111 temp_file = os.fdopen(fd, 'w')
2112 if note:
2113 for line in note:
2114 temp_file.write(line)
2116 editor = os.environ.get("EDITOR","vi")
2117 answer = 'E'
2118 while answer == 'E':
2119 os.system("%s %s" % (editor, temp_filename))
2120 temp_fh = utils.open_file(temp_filename)
2121 reject_message = "".join(temp_fh.readlines())
2122 temp_fh.close()
2123 print "Reject message:"
2124 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2125 prompt = "[R]eject, Edit, Abandon, Quit ?"
2126 answer = ""
2127 while prompt.find(answer) == -1:
2128 answer = utils.our_raw_input(prompt)
2129 m = re_default_answer.search(prompt)
2130 if answer == "":
2131 answer = m.group(1)
2132 answer = answer[:1].upper()
2133 os.unlink(temp_filename)
2134 if answer == 'A':
2135 return 1
2136 elif answer == 'Q':
2137 sys.exit(0)
2139 print "Rejecting.\n"
2143 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2144 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
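# ([:-8] strips the trailing ".changes", so e.g. foo_1.0_amd64.changes
#  ends up as foo_1.0_amd64.reason in the reject directory.)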
2146 # Move all the files into the reject directory
2147 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2148 self.force_reject(reject_files)
2150 # If we fail here someone is probably trying to exploit the race
2151 # so let's just raise an exception ...
2152 if os.path.exists(reason_filename):
2153 os.unlink(reason_filename)
2154 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2156 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2159 if not manual:
2160 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2161 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2162 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2163 os.write(reason_fd, reject_message)
2164 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2165 else:
2166 # Build up the rejection email
2167 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2168 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2169 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2170 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2171 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2172 # Write the rejection email out as the <foo>.reason file
2173 os.write(reason_fd, reject_mail_message)
2175 del self.Subst["__REJECTOR_ADDRESS__"]
2176 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2177 del self.Subst["__CC__"]
2181 # Send the rejection mail if appropriate
2182 if not cnf["Dinstall::Options::No-Mail"]:
2183 utils.send_mail(reject_mail_message)
2185 if self.logger:
2186 self.logger.log(["rejected", self.pkg.changes_file])
2190 ################################################################################
2191 def in_override_p(self, package, component, suite, binary_type, filename, session):
2193 Check if a package already has override entries in the DB
2195 @type package: string
2196 @param package: package name
2198 @type component: string
2199 @param component: database id of the component
2202 @param suite: database id of the suite
2204 @type binary_type: string
2205 @param binary_type: type of the package
2207 @type filename: string
2208 @param filename: filename we check
2210 @return: the database result. But no one cares anyway.
2216 if binary_type == "": # must be source
2217 file_type = "dsc"
2218 else:
2219 file_type = binary_type
2221 # Override suite name; used for example with proposed-updates
2222 if cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
2223 suite = cnf["Suite::%s::OverrideSuite" % (suite)]
2225 result = get_override(package, suite, component, file_type, session)
2227 # If checking for a source package fall back on the binary override type
2228 if file_type == "dsc" and len(result) < 1:
2229 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2231 # Remember the section and priority so we can check them later if appropriate
2232 if len(result) > 0:
2233 result = result[0]
2234 self.pkg.files[filename]["override section"] = result.section.section
2235 self.pkg.files[filename]["override priority"] = result.priority.priority
2237 return result
2240 ################################################################################
2241 def get_anyversion(self, sv_list, suite):
2244 @param sv_list: list of (suite, version) tuples to check
2247 @param suite: suite name
2251 Cnf = Config()
2252 anyversion = None
2253 anysuite = [suite] + Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
2254 for (s, v) in sv_list:
2255 if s in [ x.lower() for x in anysuite ]:
2256 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2257 anyversion = v
2259 return anyversion
2261 ################################################################################
2263 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2266 @param sv_list: list of (suite, version) tuples to check
2268 @type filename: string
2269 @param filename: filename of the file being checked (only used in reject messages)
2271 @type new_version: string
2272 @param new_version: the version being uploaded
2274 Ensure versions are newer than existing packages in target
2275 suites and that cross-suite version checking rules as
2276 set out in the conf file are satisfied.
2281 # Check versions for each target suite
2282 for target_suite in self.pkg.changes["distribution"].keys():
2283 must_be_newer_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
2284 must_be_older_than = [ i.lower() for i in cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
2286 # Enforce "must be newer than target suite" even if conffile omits it
2287 if target_suite not in must_be_newer_than:
2288 must_be_newer_than.append(target_suite)
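# In practice this means a sourceful upload aimed at, say, unstable has to be
# strictly newer than whatever any MustBeNewerThan suite (and unstable
# itself) already carries, and strictly older than anything in the
# MustBeOlderThan suites, unless the propagation handling below can save it.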
2290 for (suite, existent_version) in sv_list:
2291 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2293 if suite in must_be_newer_than and sourceful and vercmp < 1:
2294 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2296 if suite in must_be_older_than and vercmp > -1:
2297 cansave = 0
2299 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2300 # we really use the other suite, ignoring the conflicting one ...
2301 addsuite = self.pkg.changes["distribution-version"][suite]
2303 add_version = self.get_anyversion(sv_list, addsuite)
2304 target_version = self.get_anyversion(sv_list, target_suite)
2306 if not add_version:
2307 # not add_version can only happen if we map to a suite
2308 # that doesn't enhance the suite we're propup'ing from.
2309 # so "propup-ver x a b c; map a d" is a problem only if
2310 # d doesn't enhance a.
2312 # i think we could always propagate in this case, rather
2313 # than complaining. either way, this isn't a REJECT issue
2315 # And - we really should complain to the dorks who configured dak
2316 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2317 self.pkg.changes.setdefault("propdistribution", {})
2318 self.pkg.changes["propdistribution"][addsuite] = 1
2319 cansave = 1
2320 elif not target_version:
2321 # not target_version is true when the package is NEW
2322 # we could just stick with the "...old version..." REJECT
2323 # for this, I think.
2324 self.rejects.append("Won't propagate NEW packages.")
2325 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2326 # propagation would be redundant. no need to reject though.
2327 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2329 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2330 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2332 self.warnings.append("Propagating upload to %s" % (addsuite))
2333 self.pkg.changes.setdefault("propdistribution", {})
2334 self.pkg.changes["propdistribution"][addsuite] = 1
2335 cansave = 1
2337 if not cansave:
2338 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2340 ################################################################################
2341 def check_binary_against_db(self, filename, session):
2342 # Ensure version is sane
2343 q = session.query(BinAssociation)
2344 q = q.join(DBBinary).filter(DBBinary.package==self.pkg.files[filename]["package"])
2345 q = q.join(Architecture).filter(Architecture.arch_string.in_([self.pkg.files[filename]["architecture"], 'all']))
2347 self.cross_suite_version_check([ (x.suite.suite_name, x.binary.version) for x in q.all() ],
2348 filename, self.pkg.files[filename]["version"], sourceful=False)
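# i.e. the uploaded binary's version has just been compared against every
# (suite, version) pair already recorded for the same package name on this
# architecture (or "all"), using the cross-suite rules above.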
2350 # Check for any existing copies of the file
2351 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2352 q = q.filter_by(version=self.pkg.files[filename]["version"])
2353 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2355 if q.count() > 0:
2356 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2358 ################################################################################
2360 def check_source_against_db(self, filename, session):
2363 source = self.pkg.dsc.get("source")
2364 version = self.pkg.dsc.get("version")
2366 # Ensure version is sane
2367 q = session.query(SrcAssociation)
2368 q = q.join(DBSource).filter(DBSource.source==source)
2370 self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
2371 filename, version, sourceful=True)
2373 ################################################################################
2374 def check_dsc_against_db(self, filename, session):
2377 @warning: NB: this function can remove entries from the 'files' index [if
2378 the orig tarball is a duplicate of the one in the archive]; if
2379 you're iterating over 'files' and call this function as part of
2380 the loop, be sure to add a check to the top of the loop to
2381 ensure you haven't just tried to dereference the deleted entry.
2386 self.pkg.orig_files = {} # XXX: do we need to clear it?
2387 orig_files = self.pkg.orig_files
2389 # Try and find all files mentioned in the .dsc. This has
2390 # to work harder to cope with the multiple possible
2391 # locations of an .orig.tar.gz.
2392 # The ordering on the select is needed to pick the newest orig
2393 # when it exists in multiple places.
2394 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2396 if self.pkg.files.has_key(dsc_name):
2397 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2398 actual_size = int(self.pkg.files[dsc_name]["size"])
2399 found = "%s in incoming" % (dsc_name)
2401 # Check the file does not already exist in the archive
2402 ql = get_poolfile_like_name(dsc_name, session)
2404 # Strip out anything that isn't '%s' or '/%s$'
2405 for i in ql:
2406 if not i.filename.endswith(dsc_name):
2407 ql.remove(i)
2409 # "[dak] has not broken them. [dak] has fixed a
2410 # brokenness. Your crappy hack exploited a bug in
2413 # "(Come on! I thought it was always obvious that
2414 # one just doesn't release different files with
2415 # the same name and version.)"
2416 # -- ajk@ on d-devel@l.d.o
2419 # Ignore exact matches for .orig.tar.gz
2421 if re_is_orig_source.match(dsc_name):
2423 if self.pkg.files.has_key(dsc_name) and \
2424 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2425 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2426 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2427 # TODO: Don't delete the entry, just mark it as not needed
2428 # This would fix the stupidity of changing something we often iterate over
2429 # whilst we're doing it
2430 del self.pkg.files[dsc_name]
2431 dsc_entry["files id"] = i.file_id
2432 if not orig_files.has_key(dsc_name):
2433 orig_files[dsc_name] = {}
2434 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2438 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2440 elif re_is_orig_source.match(dsc_name):
2442 ql = get_poolfile_like_name(dsc_name, session)
2444 # Strip out anything that isn't '%s' or '/%s$'
2445 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2446 for i in ql:
2447 if not i.filename.endswith(dsc_name):
2448 ql.remove(i)
2451 # Unfortunately, we may get more than one match here if,
2452 # for example, the package was in potato but had an -sa
2453 # upload in woody. So we need to choose the right one.
2455 # default to something sane in case we don't match any or have only one
2456 x = ql[0]
2458 if len(ql) > 1:
2459 for i in ql:
2460 old_file = os.path.join(i.location.path, i.filename)
2461 old_file_fh = utils.open_file(old_file)
2462 actual_md5 = apt_pkg.md5sum(old_file_fh)
2464 actual_size = os.stat(old_file)[stat.ST_SIZE]
2465 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2466 x = i
2468 old_file = os.path.join(i.location.path, i.filename)
2469 old_file_fh = utils.open_file(old_file)
2470 actual_md5 = apt_pkg.md5sum(old_file_fh)
2472 actual_size = os.stat(old_file)[stat.ST_SIZE]
2474 suite_type = x.location.archive_type
2475 # need this for updating dsc_files in install()
2476 dsc_entry["files id"] = x.file_id
2477 # See install() in process-accepted...
2478 if not orig_files.has_key(dsc_name):
2479 orig_files[dsc_name] = {}
2480 orig_files[dsc_name]["id"] = x.file_id
2481 orig_files[dsc_name]["path"] = old_file
2482 orig_files[dsc_name]["location"] = x.location.location_id
2484 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2485 # Not there? Check the queue directories...
2486 for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2487 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2488 continue
2489 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2490 if os.path.exists(in_otherdir):
2491 in_otherdir_fh = utils.open_file(in_otherdir)
2492 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2493 in_otherdir_fh.close()
2494 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2496 if not orig_files.has_key(dsc_name):
2497 orig_files[dsc_name] = {}
2498 orig_files[dsc_name]["path"] = in_otherdir
2501 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2504 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2506 if actual_md5 != dsc_entry["md5sum"]:
2507 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2508 if actual_size != int(dsc_entry["size"]):
2509 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2511 ################################################################################
2512 # This is used by process-new and process-holding to recheck a changes file
2513 # at the time we're running. It mainly wraps various other internal functions
2514 # and is similar to accepted_checks - these should probably be tidied up
2516 def recheck(self, session):
2517 cnf = Config()
2518 for f in self.pkg.files.keys():
2519 # The .orig.tar.gz can disappear out from under us if it's a
2520 # duplicate of one in the archive.
2521 if not self.pkg.files.has_key(f):
2522 continue
2524 entry = self.pkg.files[f]
2526 # Check that the source still exists
2527 if entry["type"] == "deb":
2528 source_version = entry["source version"]
2529 source_package = entry["source package"]
2530 if not self.pkg.changes["architecture"].has_key("source") \
2531 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
2532 source_epochless_version = re_no_epoch.sub('', source_version)
2533 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
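# e.g. source package "foo" at version 1:1.2-3 yields "foo_1.2-3.dsc";
# the epoch is stripped because it never appears in on-disk filenames.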
2535 for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
2536 if cnf.has_key("Dir::Queue::%s" % (q)):
2537 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2540 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2542 # Version and file overwrite checks
2543 if entry["type"] == "deb":
2544 self.check_binary_against_db(f, session)
2545 elif entry["type"] == "dsc":
2546 self.check_source_against_db(f, session)
2547 self.check_dsc_against_db(f, session)
2549 ################################################################################
2550 def accepted_checks(self, overwrite_checks, session):
2551 # Recheck anything that relies on the database, since that's not
2552 # frozen between accept and our run time when called from p-a.
2554 # overwrite_checks is set to False when installing to stable/oldstable
2556 propogate = {}
2557 nopropogate = {}
2559 # Find the .dsc (again)
2561 for f in self.pkg.files.keys():
2562 if self.pkg.files[f]["type"] == "dsc":
2563 dsc_filename = f
2565 for checkfile in self.pkg.files.keys():
2566 # The .orig.tar.gz can disappear out from under us if it's a
2567 # duplicate of one in the archive.
2568 if not self.pkg.files.has_key(checkfile):
2569 continue
2571 entry = self.pkg.files[checkfile]
2573 # Check that the source still exists
2574 if entry["type"] == "deb":
2575 source_version = entry["source version"]
2576 source_package = entry["source package"]
2577 if not self.pkg.changes["architecture"].has_key("source") \
2578 and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys()):
2579 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2581 # Version and file overwrite checks
2582 if overwrite_checks:
2583 if entry["type"] == "deb":
2584 self.check_binary_against_db(checkfile, session)
2585 elif entry["type"] == "dsc":
2586 self.check_source_against_db(checkfile, session)
2587 self.check_dsc_against_db(dsc_filename, session)
2589 # propagate in the case it is in the override tables:
2590 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2591 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2592 propogate[suite] = 1
2594 nopropogate[suite] = 1
2596 for suite in propogate.keys():
2597 if suite in nopropogate:
2598 continue
2599 self.pkg.changes["distribution"][suite] = 1
2601 for checkfile in self.pkg.files.keys():
2602 # Check the package is still in the override tables
2603 for suite in self.pkg.changes["distribution"].keys():
2604 if not self.in_override_p(self.pkg.files[checkfile]["package"], self.pkg.files[checkfile]["component"], suite, self.pkg.files[checkfile].get("dbtype", ""), checkfile, session):
2605 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2607 ################################################################################
2608 # This is not really a reject, but an unaccept, but since a) the code for
2609 # that is non-trivial (reopen bugs, unannounce etc.), b) this should be
2610 # extremely rare, for now we'll go with whining at our admin folks...
2612 def do_unaccept(self):
2614 cnf = Config()
2616 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2617 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
2618 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2619 self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
2620 if cnf.has_key("Dinstall::Bcc"):
2621 self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])
2623 template = os.path.join(cnf["Dir::Templates"], "process-accepted.unaccept")
2625 reject_mail_message = utils.TemplateSubst(self.Subst, template)
2627 # Write the rejection email out as the <foo>.reason file
2628 reason_filename = os.path.basename(self.pkg.changes_file[:-8]) + ".reason"
2629 reject_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2631 # If we fail here someone is probably trying to exploit the race
2632 # so let's just raise an exception ...
2633 if os.path.exists(reject_filename):
2634 os.unlink(reject_filename)
2636 fd = os.open(reject_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2637 os.write(fd, reject_mail_message)
2638 os.close(fd)
2640 utils.send_mail(reject_mail_message)
2642 del self.Subst["__REJECTOR_ADDRESS__"]
2643 del self.Subst["__REJECT_MESSAGE__"]
2644 del self.Subst["__CC__"]
2646 ################################################################################
2647 # If any file of an upload has a recent mtime then chances are good
2648 # the file is still being uploaded.
2650 def upload_too_new(self):
2651 cnf = Config()
2652 too_new = False
2653 # Move back to the original directory to get accurate time stamps
2654 cwd = os.getcwd()
2655 os.chdir(self.pkg.directory)
2656 file_list = self.pkg.files.keys()
2657 file_list.extend(self.pkg.dsc_files.keys())
2658 file_list.append(self.pkg.changes_file)
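# Any file modified less than Dinstall::SkipTime seconds ago (e.g. 300 for
# five minutes) marks the whole upload as too new, i.e. probably still in
# the middle of being uploaded.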
2659 for f in file_list:
2661 last_modified = time.time()-os.path.getmtime(f)
2662 if last_modified < int(cnf["Dinstall::SkipTime"]):