#!/usr/bin/env python
+# vim:set et sw=4:
-# Queue utility functions for dak
-# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
+"""
+Queue utility functions for dak
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2001 - 2006 James Troup <james@nocrew.org>
+@copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
###############################################################################
-import cPickle, errno, os, pg, re, stat, sys, time
-import apt_inst, apt_pkg
-import utils, database
-
+import errno
+import os
+import stat
+import sys
+import time
+import apt_inst
+import apt_pkg
+import utils
+import commands
+import shutil
+import textwrap
from types import *
+from sqlalchemy.sql.expression import desc
+from sqlalchemy.orm.exc import NoResultFound
+
+import yaml
+
+from dak_exceptions import *
+from changes import *
+from regexes import *
+from config import Config
+from holding import Holding
+from urgencylog import UrgencyLog
+from dbconn import *
+from summarystats import SummaryStats
+from utils import parse_changes, check_dsc_files, build_package_list
+from textutils import fix_maintainer
+from lintian import parse_lintian_output, generate_reject_messages
+from contents import UnpackedSource
+
+################################################################################
+
def check_valid(overrides, session):
    """Validate section, priority and component of new overrides.

    Each override dict is additionally annotated in place with a boolean
    'valid' key. Sanity rules: debian-installer packages have to be udeb
    (or source), and non debian-installer packages cannot be udeb.

    @type overrides: list of dict
    @param overrides: overrides to check; each dict carries the string
        keys 'package', 'priority', 'section', 'component' and 'type'
        (one of 'dsc', 'deb' or 'udeb').

    @rtype: bool
    @return: C{True} if all overrides are valid, C{False} if there is any
        invalid override.
    """
    all_valid = True
    for override in overrides:
        override['valid'] = True
        # The referenced priority, section and component must all be known
        # to the database.
        if session.query(Priority).filter_by(priority=override['priority']).first() is None:
            override['valid'] = False
        if session.query(Section).filter_by(section=override['section']).first() is None:
            override['valid'] = False
        if get_mapped_component(override['component'], session) is None:
            override['valid'] = False
        override_type = override['type']
        if override_type not in ('dsc', 'deb', 'udeb'):
            raise Exception('Unknown override type {0}'.format(override_type))
        # debian-installer sanity: only udebs (or source) belong there, and
        # udebs belong nowhere else.
        if override_type == 'udeb' and override['section'] != 'debian-installer':
            override['valid'] = False
        if override['section'] == 'debian-installer' and override_type not in ('dsc', 'udeb'):
            override['valid'] = False
        all_valid = all_valid and override['valid']
    return all_valid
###############################################################################
-re_isanum = re.compile (r"^\d+$")
-re_default_answer = re.compile(r"\[(.*)\]")
-re_fdnic = re.compile(r"\n\n")
-re_bin_only_nmu = re.compile(r"\+b\d+$")
def prod_maintainer(notes, upload):
    """Interactively compose and send a "prod" mail to the maintainer.

    The comments of the given notes seed an $EDITOR session; the operator
    may then [P]rod (send the mail), Edit again, Abandon or Quit.

    @param notes: note objects whose .comment texts seed the message

    @param upload: upload whose .changes and target suite supply the
        addresses and mail whitelist

    @return: C{None} on abandon or after sending, C{0} on quit
    """
    cnf = Config()
    changes = upload.changes
    whitelists = [ upload.target_suite.mail_whitelist ]

    # Here we prepare an editor and get them ready to prod...
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    temp_file.write("\n\n=====\n\n".join([note.comment for note in notes]))
    temp_file.close()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_fh = utils.open_file(temp_filename)
        prod_message = "".join(temp_fh.readlines())
        temp_fh.close()
        print "Prod message:"
        print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
        prompt = "[P]rod, Edit, Abandon, Quit ?"
        answer = "XXX"
        # Keep asking until the answer is one of the letters in the prompt;
        # an empty answer selects the bracketed default ("P").
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        return 0
    # Otherwise, do the prodding...
    user_email_address = utils.whoami() + " <%s>" % (
        cnf["Dinstall::MyAdminAddress"])

    # Mail goes to Maintainer and Changed-By (deduplicated by the helper).
    changed_by = changes.changedby or changes.maintainer
    maintainer = changes.maintainer
    maintainer_to = utils.mail_addresses_for_upload(maintainer, changed_by, changes.fingerprint)

    Subst = {
        '__SOURCE__': upload.changes.source,
        '__CHANGES_FILENAME__': upload.changes.changesname,
        '__MAINTAINER_TO__': ", ".join(maintainer_to),
    }

    Subst["__FROM_ADDRESS__"] = user_email_address
    Subst["__PROD_MESSAGE__"] = prod_message
    Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]

    prod_mail_message = utils.TemplateSubst(
        Subst,cnf["Dir::Templates"]+"/process-new.prod")

    # Send the prod mail
    utils.send_mail(prod_mail_message, whitelists=whitelists)

    print "Sent prodding message"
+
+################################################################################
+
def edit_note(note, upload, session, trainee=False):
    """Interactively edit a note for an upload and store it as a NewComment.

    Opens $EDITOR on an empty temporary file; the operator may [D]one
    (store), Edit again, Abandon or Quit.

    NOTE(review): the `note` parameter is currently unused and the editor
    starts from an empty file rather than the existing note's text --
    presumably intentional, but worth confirming against callers.

    @return: C{None} on abandon or success, C{0} on quit
    """
    # Open an (empty) temporary file for the operator to edit.
    (fd, temp_filename) = utils.temp_filename()
    editor = os.environ.get("EDITOR","vi")
    answer = 'E'
    while answer == 'E':
        os.system("%s %s" % (editor, temp_filename))
        temp_file = utils.open_file(temp_filename)
        newnote = temp_file.read().rstrip()
        temp_file.close()
        print "New Note:"
        print utils.prefix_multi_line_string(newnote," ")
        prompt = "[D]one, Edit, Abandon, Quit ?"
        answer = "XXX"
        # An empty answer selects the bracketed default ("D").
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()
    os.unlink(temp_filename)
    if answer == 'A':
        return
    elif answer == 'Q':
        return 0

    # Persist the edited note against this upload's policy queue entry.
    comment = NewComment()
    comment.policy_queue = upload.policy_queue
    comment.package = upload.changes.source
    comment.version = upload.changes.version
    comment.comment = newnote
    comment.author = utils.whoami()
    comment.trainee = trainee
    session.add(comment)
    session.commit()
###############################################################################
-# Convenience wrapper to carry around all the package information in
# FIXME: Should move into the database
# suite names DMs can upload to
dm_suites = ['unstable', 'experimental', 'squeeze-backports']

def get_newest_source(source, session):
    """Return the newest DBSource object for `source` in dm_suites.

    Matching sources are ordered by descending version; returns the first
    hit, or C{None} when the package is absent from all DM suites.
    """
    ## the most recent version of the package uploaded to unstable or
    ## experimental includes the field "DM-Upload-Allowed: yes" in the source
    ## section of its control file
    q = session.query(DBSource).filter_by(source = source). \
        filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
        order_by(desc('source.version'))
    return q.first()
+
def get_suite_version_by_source(source, session):
    """Return a list of (suite_name, version) tuples for a source package."""
    query = session.query(Suite.suite_name, DBSource.version)
    query = query.join(Suite.sources)
    query = query.filter_by(source=source)
    return query.all()
+
def get_source_by_package_and_suite(package, suite_name, session):
    '''
    Return a DBSource query restricted to sources that build binary
    `package` in the suite named `suite_name`.

    Note: the (unevaluated) query object is returned, not a result list.
    '''
    query = session.query(DBSource)
    query = query.join(DBSource.binaries).filter_by(package=package)
    return query.join(DBBinary.suites).filter_by(suite_name=suite_name)
+
def get_suite_version_by_package(package, arch_string, session):
    '''
    Return a list of (suite_name, version) tuples for binary `package`
    on `arch_string`; architecture-independent ('all') binaries match too.
    '''
    query = session.query(Suite.suite_name, DBBinary.version)
    query = query.join(Suite.binaries).filter_by(package=package)
    query = query.join(DBBinary.architecture)
    architectures = [arch_string, 'all']
    return query.filter(Architecture.arch_string.in_(architectures)).all()
+
class Upload(object):
    """
    Everything that has to do with processing an upload.
    """
    def __init__(self):
        # Logger instance; left unset here and assigned by callers.
        self.logger = None
        # Parsed .changes data for the upload being processed.
        self.pkg = Changes()
        # reset() initialises the remaining per-upload state
        # (defined elsewhere in this class, outside this view).
        self.reset()
-class Pkg:
- def __init__(self, **kwds):
- self.__dict__.update(kwds)
+ ###########################################################################
- def update(self, **kwds):
- self.__dict__.update(kwds)
    def update_subst(self):
        """ Set up the per-package template substitution mappings.

        Disabled: unconditionally raises. Everything after the raise is
        unreachable legacy code kept during the refactoring.
        """
        raise Exception('to be removed')

        # ------- unreachable legacy code below this line -------
        cnf = Config()

        # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
        if not self.pkg.changes.has_key("architecture") or not \
           isinstance(self.pkg.changes["architecture"], dict):
            self.pkg.changes["architecture"] = { "Unknown" : "" }

        # and maintainer2047 may not exist.
        if not self.pkg.changes.has_key("maintainer2047"):
            self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]

        self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
        self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
        self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")

        # For source uploads the Changed-By field wins; otherwise Maintainer wins.
        if self.pkg.changes["architecture"].has_key("source") and \
           self.pkg.changes["changedby822"] != "" and \
           (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):

            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
            self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
        else:
            self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
            self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")

        # Process policy doesn't set the fingerprint field and I don't want to make it
        # do it for now as I don't want to have to deal with the case where we accepted
        # the package into PU-NEW, but the fingerprint has gone away from the keyring in
        # the meantime so the package will be remarked as rejectable. Urgh.
        # TODO: Fix this properly
        if self.pkg.changes.has_key('fingerprint'):
            session = DBConn().session()
            fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
            if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
                if self.pkg.changes.has_key("sponsoremail"):
                    self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
            session.close()

        # Bcc the package-tracking server for source uploads, if configured.
        if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
            self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])

        # Apply any global override of the Maintainer field
        if cnf.get("Dinstall::OverrideMaintainer"):
            self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
            self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]

        self.Subst["__REJECT_MESSAGE__"] = self.package_info()
        self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
        self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
        self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
###########################################################################
- def update_vars (self):
- dump_filename = self.pkg.changes_file[:-8]+".dak"
- dump_file = utils.open_file(dump_filename)
- p = cPickle.Unpickler(dump_file)
- for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
- exec "self.pkg.%s.update(p.load());" % (i)
- for i in [ "orig_tar_id", "orig_tar_location" ]:
- exec "self.pkg.%s = p.load();" % (i)
- dump_file.close()
    def check_distributions(self):
        """Check and map the Distribution field.

        Applies the configured SuiteMappings (map, silent-map,
        map-unreleased, ignore, reject, propup-version) to
        self.pkg.changes["distribution"], appending to self.notes,
        self.warnings and self.rejects as appropriate.
        """

        Cnf = Config()

        # Handle suite mappings
        for m in Cnf.value_list("SuiteMappings"):
            args = m.split()
            mtype = args[0]
            if mtype == "map" or mtype == "silent-map":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    del self.pkg.changes["distribution"][source]
                    self.pkg.changes["distribution"][dest] = 1
                    if mtype != "silent-map":
                        self.notes.append("Mapping %s to %s." % (source, dest))
                # Keep any distribution-version mapping in step with the rename.
                if self.pkg.changes.has_key("distribution-version"):
                    if self.pkg.changes["distribution-version"].has_key(source):
                        self.pkg.changes["distribution-version"][source]=dest
            elif mtype == "map-unreleased":
                (source, dest) = args[1:3]
                if self.pkg.changes["distribution"].has_key(source):
                    # One architecture missing from the source suite is enough
                    # to divert the whole upload to the "unreleased" target.
                    for arch in self.pkg.changes["architecture"].keys():
                        if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
                            self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
                            del self.pkg.changes["distribution"][source]
                            self.pkg.changes["distribution"][dest] = 1
                            break
            elif mtype == "ignore":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    del self.pkg.changes["distribution"][suite]
                    self.warnings.append("Ignoring %s as a target suite." % (suite))
            elif mtype == "reject":
                suite = args[1]
                if self.pkg.changes["distribution"].has_key(suite):
                    self.rejects.append("Uploads to %s are not accepted." % (suite))
            elif mtype == "propup-version":
                # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
                #
                # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
                if self.pkg.changes["distribution"].has_key(args[1]):
                    self.pkg.changes.setdefault("distribution-version", {})
                    for suite in args[2:]:
                        self.pkg.changes["distribution-version"][suite] = suite

        # Ensure there is (still) a target distribution
        if len(self.pkg.changes["distribution"].keys()) < 1:
            self.rejects.append("No valid distribution remaining.")

        # Ensure target distributions exist
        for suite in self.pkg.changes["distribution"].keys():
            if not get_suite(suite.lower()):
                self.rejects.append("Unknown distribution `%s'." % (suite))
###########################################################################
- # Set up the per-package template substitution mappings
    def per_suite_file_checks(self, f, suite, session):
        """Disabled: unconditionally raises; body below is unreachable."""
        raise Exception('removed')

        # Handle component mappings
        # NOTE(review): unreachable dead code; it also references names not
        # defined in this scope (cnf, entry) -- kept only as residue of the
        # refactoring, safe to delete.
        for m in cnf.value_list("ComponentMappings"):
            (source, dest) = m.split()
            if entry["component"] == source:
                entry["original component"] = source
                entry["component"] = dest
- Subst["__ARCHITECTURE__"] = " ".join(changes["architecture"].keys())
- Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
- Subst["__FILE_CONTENTS__"] = changes.get("filecontents", "")
+ ###########################################################################
- # For source uploads the Changed-By field wins; otherwise Maintainer wins.
- if changes["architecture"].has_key("source") and changes["changedby822"] != "" and (changes["changedby822"] != changes["maintainer822"]):
- Subst["__MAINTAINER_FROM__"] = changes["changedby2047"]
- Subst["__MAINTAINER_TO__"] = "%s, %s" % (changes["changedby2047"],
- changes["maintainer2047"])
- Subst["__MAINTAINER__"] = changes.get("changed-by", "Unknown")
+ # Sanity check the time stamps of files inside debs.
+ # [Files in the near future cause ugly warnings and extreme time
+ # travel can cause errors on extraction]
+
    def check_if_upload_is_sponsored(self, uid_email, uid_name):
        """Return True when the upload looks sponsored, i.e. the signer
        matches neither the Maintainer nor the Changed-By identity.

        Side effect: for sponsored source uploads signed with a key that
        carries an @debian.org alias address, records that address in
        self.pkg.changes["sponsoremail"].
        """
        # All four identity fields must be present before we can decide.
        for key in "maintaineremail", "changedbyemail", "maintainername", "changedbyname":
            if not self.pkg.changes.has_key(key):
                return False
        # Normalise: keep at most one '@' worth of the address.
        uid_email = '@'.join(uid_email.split('@')[:2])
        if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
            sponsored = False
        elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
            sponsored = False
            # An empty name matching is meaningless; treat as sponsored.
            if uid_name == "":
                sponsored = True
        else:
            sponsored = True
            sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
            # NOTE(review): under Python 3 filter() returns an iterator, which
            # the truthiness test and indexing below would break -- fine for
            # the Python 2 codebase this belongs to.
            debian_emails = filter(lambda addr: addr.endswith('@debian.org'), sponsor_addresses)
            if uid_email not in debian_emails:
                if debian_emails:
                    # Prefer the key's @debian.org address for alias handling.
                    uid_email = debian_emails[0]
            if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
                if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
                    self.pkg.changes["changedbyemail"] not in sponsor_addresses):
                    self.pkg.changes["sponsoremail"] = uid_email

        return sponsored
+
    def check_dm_upload(self, fpr, session):
        """Enforce the DM upload constraints from the 2007 GR: reject when an
        uploaded binary would be taken over from a different source package
        ("hijack") in any target suite. Appends to self.rejects."""
        # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
        ## none of the uploaded packages are NEW
        ## none of the packages are being taken over from other source packages
        for b in self.pkg.changes["binary"].keys():
            for suite in self.pkg.changes["distribution"].keys():
                for s in get_source_by_package_and_suite(b, suite, session):
                    if s.source != self.pkg.changes["source"]:
                        # NOTE(review): "%s" % s interpolates the DBSource
                        # object (its repr), not the source name; s.source
                        # was probably intended -- confirm before changing.
                        self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
###########################################################################
+ # End check_signed_by_key checks
+ ###########################################################################
    def build_summaries(self):
        """ Build a summary of changes the upload introduces. """

        # Delegates the per-file summarising to the Changes object.
        (byhand, new, summary, override_summary) = self.pkg.file_summary()

        short_summary = summary

        # This is for direport's benefit...
        f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))

        summary += "\n\nChanges:\n" + f

        summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"

        # NOTE(review): as visible here, byhand/new and short_summary are
        # unused and the method has no return statement -- looks truncated
        # mid-refactor; confirm against the full file / callers.
###########################################################################
- def close_bugs (self, summary, action):
- changes = self.pkg.changes
- Subst = self.Subst
- Cnf = self.Cnf
-
- bugs = changes["closes"].keys()
-
- if not bugs:
- return summary
-
- bugs.sort()
- if not self.nmu.is_an_nmu(self.pkg):
- if changes["distribution"].has_key("experimental"):
- # tag bugs as fixed-in-experimental for uploads to experimental
- summary += "Setting bugs to severity fixed: "
- control_message = ""
- for bug in bugs:
- summary += "%s " % (bug)
- control_message += "tag %s + fixed-in-experimental\n" % (bug)
- if action and control_message != "":
- Subst["__CONTROL_MESSAGE__"] = control_message
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-experimental-fixed")
- utils.send_mail (mail_message)
- if action:
- self.Logger.log(["setting bugs to fixed"]+bugs)
-
-
- else:
- summary += "Closing bugs: "
- for bug in bugs:
- summary += "%s " % (bug)
- if action:
- Subst["__BUG_NUMBER__"] = bug
- if changes["distribution"].has_key("stable"):
- Subst["__STABLE_WARNING__"] = """
-Note that this package is not part of the released stable Debian
-distribution. It may have dependencies on other unreleased software,
-or other instabilities. Please take care if you wish to install it.
-The update will eventually make its way into the next released Debian
-distribution."""
- else:
- Subst["__STABLE_WARNING__"] = ""
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-close")
- utils.send_mail (mail_message)
- if action:
- self.Logger.log(["closing bugs"]+bugs)
-
- else: # NMU
- summary += "Setting bugs to severity fixed: "
- control_message = ""
- for bug in bugs:
- summary += "%s " % (bug)
- control_message += "tag %s + fixed\n" % (bug)
- if action and control_message != "":
- Subst["__CONTROL_MESSAGE__"] = control_message
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.bug-nmu-fixed")
- utils.send_mail (mail_message)
- if action:
- self.Logger.log(["setting bugs to fixed"]+bugs)
- summary += "\n"
- return summary
-
- ###########################################################################
-
- def announce (self, short_summary, action):
- Subst = self.Subst
- Cnf = self.Cnf
- changes = self.pkg.changes
+ def announce(self, short_summary, action):
+ """
+ Send an announce mail about a new upload.
- # Only do announcements for source uploads with a recent dpkg-dev installed
- if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
- return ""
+ @type short_summary: string
+ @param short_summary: Short summary text to include in the mail
- lists_done = {}
- summary = ""
- Subst["__SHORT_SUMMARY__"] = short_summary
+ @type action: bool
+ @param action: Set to false no real action will be done.
- for dist in changes["distribution"].keys():
- list = Cnf.Find("Suite::%s::Announce" % (dist))
- if list == "" or lists_done.has_key(list):
- continue
- lists_done[list] = 1
- summary += "Announcing to %s\n" % (list)
+ @rtype: string
+ @return: Textstring about action taken.
- if action:
- Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
- if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
- Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
- utils.send_mail (mail_message)
+ """
- if Cnf.FindB("Dinstall::CloseBugs"):
- summary = self.close_bugs(summary, action)
+ cnf = Config()
- return summary
+ # Skip all of this if not sending mail to avoid confusing people
+ if cnf.has_key("Dinstall::Options::No-Mail") and cnf["Dinstall::Options::No-Mail"]:
+ return ""
- ###########################################################################
+ # Only do announcements for source uploads with a recent dpkg-dev installed
+ if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
+ self.pkg.changes["architecture"].has_key("source"):
+ return ""
- def accept (self, summary, short_summary):
- Cnf = self.Cnf
- Subst = self.Subst
- files = self.pkg.files
- changes = self.pkg.changes
- changes_file = self.pkg.changes_file
- dsc = self.pkg.dsc
-
- print "Accepting."
- self.Logger.log(["Accepting changes",changes_file])
-
- self.dump_vars(Cnf["Dir::Queue::Accepted"])
-
- # Move all the files into the accepted directory
- utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
- file_keys = files.keys()
- for file in file_keys:
- utils.move(file, Cnf["Dir::Queue::Accepted"])
- self.accept_bytes += float(files[file]["size"])
- self.accept_count += 1
-
- # Send accept mail, announce to lists, close bugs and check for
- # override disparities
- if not Cnf["Dinstall::Options::No-Mail"]:
- Subst["__SUITE__"] = ""
- Subst["__SUMMARY__"] = summary
- mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
- utils.send_mail(mail_message)
- self.announce(short_summary, 1)
-
-
- ## Helper stuff for DebBugs Version Tracking
- if Cnf.Find("Dir::Queue::BTSVersionTrack"):
- # ??? once queue/* is cleared on *.d.o and/or reprocessed
- # the conditionalization on dsc["bts changelog"] should be
- # dropped.
-
- # Write out the version history from the changelog
- if changes["architecture"].has_key("source") and \
- dsc.has_key("bts changelog"):
-
- temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
- dotprefix=1, perms=0644)
- version_history = utils.open_file(temp_filename, 'w')
- version_history.write(dsc["bts changelog"])
- version_history.close()
- filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
- changes_file[:-8]+".versions")
- os.rename(temp_filename, filename)
-
- # Write out the binary -> source mapping.
- temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
- dotprefix=1, perms=0644)
- debinfo = utils.open_file(temp_filename, 'w')
- for file in file_keys:
- f = files[file]
- if f["type"] == "deb":
- line = " ".join([f["package"], f["version"],
- f["architecture"], f["source package"],
- f["source version"]])
- debinfo.write(line+"\n")
- debinfo.close()
- filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
- changes_file[:-8]+".debinfo")
- os.rename(temp_filename, filename)
-
- self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
+ announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
- ###########################################################################
+ lists_todo = {}
+ summary = ""
- def queue_build (self, queue, path):
- Cnf = self.Cnf
- Subst = self.Subst
- files = self.pkg.files
- changes = self.pkg.changes
- changes_file = self.pkg.changes_file
- dsc = self.pkg.dsc
- file_keys = files.keys()
-
- ## Special support to enable clean auto-building of queued packages
- queue_id = database.get_or_set_queue_id(queue)
-
- self.projectB.query("BEGIN WORK")
- for suite in changes["distribution"].keys():
- if suite not in Cnf.ValueList("Dinstall::QueueBuildSuites"):
- continue
- suite_id = database.get_suite_id(suite)
- dest_dir = Cnf["Dir::QueueBuild"]
- if Cnf.FindB("Dinstall::SecurityQueueBuild"):
- dest_dir = os.path.join(dest_dir, suite)
- for file in file_keys:
- src = os.path.join(path, file)
- dest = os.path.join(dest_dir, file)
- if Cnf.FindB("Dinstall::SecurityQueueBuild"):
- # Copy it since the original won't be readable by www-data
- utils.copy(src, dest)
- else:
- # Create a symlink to it
- os.symlink(src, dest)
- # Add it to the list of packages for later processing by apt-ftparchive
- self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
- # If the .orig.tar.gz is in the pool, create a symlink to
- # it (if one doesn't already exist)
- if self.pkg.orig_tar_id:
- # Determine the .orig.tar.gz file name
- for dsc_file in self.pkg.dsc_files.keys():
- if dsc_file.endswith(".orig.tar.gz"):
- filename = dsc_file
- dest = os.path.join(dest_dir, filename)
- # If it doesn't exist, create a symlink
- if not os.path.exists(dest):
- # Find the .orig.tar.gz in the pool
- q = self.projectB.query("SELECT l.path, f.filename from location l, files f WHERE f.id = %s and f.location = l.id" % (self.pkg.orig_tar_id))
- ql = q.getresult()
- if not ql:
- utils.fubar("[INTERNAL ERROR] Couldn't find id %s in files table." % (self.pkg.orig_tar_id))
- src = os.path.join(ql[0][0], ql[0][1])
- os.symlink(src, dest)
- # Add it to the list of packages for later processing by apt-ftparchive
- self.projectB.query("INSERT INTO queue_build (suite, queue, filename, in_queue) VALUES (%s, %s, '%s', 't')" % (suite_id, queue_id, dest))
- # if it does, update things to ensure it's not removed prematurely
- else:
- self.projectB.query("UPDATE queue_build SET in_queue = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id))
+ # Get a unique list of target lists
+ for dist in self.pkg.changes["distribution"].keys():
+ suite = get_suite(dist)
+ if suite is None: continue
+ for tgt in suite.announce:
+ lists_todo[tgt] = 1
- self.projectB.query("COMMIT WORK")
+ self.Subst["__SHORT_SUMMARY__"] = short_summary
- ###########################################################################
+ for announce_list in lists_todo.keys():
+ summary += "Announcing to %s\n" % (announce_list)
- def check_override (self):
- Subst = self.Subst
- changes = self.pkg.changes
- files = self.pkg.files
- Cnf = self.Cnf
-
- # Abandon the check if:
- # a) it's a non-sourceful upload
- # b) override disparity checks have been disabled
- # c) we're not sending mail
- if not changes["architecture"].has_key("source") or \
- not Cnf.FindB("Dinstall::OverrideDisparityCheck") or \
- Cnf["Dinstall::Options::No-Mail"]:
- return
-
- summary = ""
- file_keys = files.keys()
- file_keys.sort()
- for file in file_keys:
- if not files[file].has_key("new") and files[file]["type"] == "deb":
- section = files[file]["section"]
- override_section = files[file]["override section"]
- if section.lower() != override_section.lower() and section != "-":
- # Ignore this; it's a common mistake and not worth whining about
- if section.lower() == "non-us/main" and override_section.lower() == "non-us":
- continue
- summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
- priority = files[file]["priority"]
- override_priority = files[file]["override priority"]
- if priority != override_priority and priority != "-":
- summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
-
- if summary == "":
- return
+ if action:
+ self.update_subst()
+ self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
+ if cnf.get("Dinstall::TrackingServer") and \
+ self.pkg.changes["architecture"].has_key("source"):
+ trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
+ self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
- Subst["__SUMMARY__"] = summary
- mail_message = utils.TemplateSubst(Subst,self.Cnf["Dir::Templates"]+"/process-unchecked.override-disparity")
- utils.send_mail(mail_message)
+ mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
+ utils.send_mail(mail_message)
- ###########################################################################
+ del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
- def force_reject (self, files):
- """Forcefully move files from the current directory to the
- reject directory. If any file already exists in the reject
- directory it will be moved to the morgue to make way for
- the new file."""
+ if cnf.find_b("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
+ summary = self.close_bugs(summary, action)
- Cnf = self.Cnf
+ del self.Subst["__SHORT_SUMMARY__"]
- for file in files:
- # Skip any files which don't exist or which we don't have permission to copy.
- if os.access(file,os.R_OK) == 0:
- continue
- dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
- try:
- dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
- except OSError, e:
- # File exists? Let's try and move it to the morgue
- if errno.errorcode[e.errno] == 'EEXIST':
- morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
- try:
- morgue_file = utils.find_next_free(morgue_file)
- except utils.tried_too_hard_exc:
- # Something's either gone badly Pete Tong, or
- # someone is trying to exploit us.
- utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
- return
- utils.move(dest_file, morgue_file, perms=0660)
- try:
- dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
- except OSError, e:
- # Likewise
- utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
- return
- else:
- raise
- # If we got here, we own the destination file, so we can
- # safely overwrite it.
- utils.move(file, dest_file, 1, perms=0660)
- os.close(dest_fd)
+ return summary
###########################################################################
- def do_reject (self, manual = 0, reject_message = ""):
- # If we weren't given a manual rejection message, spawn an
- # editor so the user can add one in...
- if manual and not reject_message:
- temp_filename = utils.temp_filename()
- editor = os.environ.get("EDITOR","vi")
- answer = 'E'
- while answer == 'E':
- os.system("%s %s" % (editor, temp_filename))
- temp_fh = utils.open_file(temp_filename)
- reject_message = "".join(temp_fh.readlines())
- temp_fh.close()
- print "Reject message:"
- print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
- prompt = "[R]eject, Edit, Abandon, Quit ?"
- answer = "XXX"
- while prompt.find(answer) == -1:
- answer = utils.our_raw_input(prompt)
- m = re_default_answer.search(prompt)
- if answer == "":
- answer = m.group(1)
- answer = answer[:1].upper()
- os.unlink(temp_filename)
- if answer == 'A':
- return 1
- elif answer == 'Q':
- sys.exit(0)
-
- print "Rejecting.\n"
-
- Cnf = self.Cnf
- Subst = self.Subst
- pkg = self.pkg
-
- reason_filename = pkg.changes_file[:-8] + ".reason"
- reason_filename = Cnf["Dir::Queue::Reject"] + '/' + reason_filename
-
- # Move all the files into the reject directory
- reject_files = pkg.files.keys() + [pkg.changes_file]
- self.force_reject(reject_files)
-
- # If we fail here someone is probably trying to exploit the race
- # so let's just raise an exception ...
- if os.path.exists(reason_filename):
- os.unlink(reason_filename)
- reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
-
- if not manual:
- Subst["__REJECTOR_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
- Subst["__MANUAL_REJECT_MESSAGE__"] = ""
- Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
- os.write(reason_fd, reject_message)
- reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
- else:
- # Build up the rejection email
- user_email_address = utils.whoami() + " <%s>" % (Cnf["Dinstall::MyAdminAddress"])
-
- Subst["__REJECTOR_ADDRESS__"] = user_email_address
- Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
- Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
- reject_mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/queue.rejected")
- # Write the rejection email out as the <foo>.reason file
- os.write(reason_fd, reject_mail_message)
+ def check_override(self):
+ """
+ Checks override entries for validity. Mails "Override disparity" warnings,
+ if that feature is enabled.
- os.close(reason_fd)
+ Abandons the check if
+ - override disparity checks are disabled
+ - mail sending is disabled
+ """
- # Send the rejection mail if appropriate
- if not Cnf["Dinstall::Options::No-Mail"]:
- utils.send_mail(reject_mail_message)
+ cnf = Config()
- self.Logger.log(["rejected", pkg.changes_file])
- return 0
+ # Abandon the check if override disparity checks have been disabled
+ if not cnf.find_b("Dinstall::OverrideDisparityCheck"):
+ return
- ################################################################################
+ summary = self.pkg.check_override()
- # Ensure that source exists somewhere in the archive for the binary
- # upload being processed.
- #
- # (1) exact match => 1.0-3
- # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
-
- def source_exists (self, package, source_version, suites = ["any"]):
- okay = 1
- for suite in suites:
- if suite == "any":
- que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
- (package)
- else:
- # source must exist in suite X, or in some other suite that's
- # mapped to X, recursively... silent-maps are counted too,
- # unreleased-maps aren't.
- maps = self.Cnf.ValueList("SuiteMappings")[:]
- maps.reverse()
- maps = [ m.split() for m in maps ]
- maps = [ (x[1], x[2]) for x in maps
- if x[0] == "map" or x[0] == "silent-map" ]
- s = [suite]
- for x in maps:
- if x[1] in s and x[0] not in s:
- s.append(x[0])
-
- que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
- q = self.projectB.query(que)
-
- # Reduce the query results to a list of version numbers
- ql = [ i[0] for i in q.getresult() ]
-
- # Try (1)
- if source_version in ql:
- continue
+ if summary == "":
+ return
- # Try (2)
- orig_source_version = re_bin_only_nmu.sub('', source_version)
- if orig_source_version in ql:
- continue
+ overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
- # No source found...
- okay = 0
- break
- return okay
+ self.update_subst()
+ self.Subst["__SUMMARY__"] = summary
+ mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
+ utils.send_mail(mail_message)
+ del self.Subst["__SUMMARY__"]
################################################################################
-
- def in_override_p (self, package, component, suite, binary_type, file):
- files = self.pkg.files
+ def get_anyversion(self, sv_list, suite):
+ """
+ @type sv_list: list
+ @param sv_list: list of (suite, version) tuples to check
+
+ @type suite: string
+ @param suite: suite name
+
+ Description: TODO
+ """
+ Cnf = Config()
+ anyversion = None
+ anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
+ for (s, v) in sv_list:
+ if s in [ x.lower() for x in anysuite ]:
+ if not anyversion or apt_pkg.version_compare(anyversion, v) <= 0:
+ anyversion = v
- if binary_type == "": # must be source
- type = "dsc"
- else:
- type = binary_type
-
- # Override suite name; used for example with proposed-updates
- if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
- suite = self.Cnf["Suite::%s::OverrideSuite" % (suite)]
-
- # Avoid <undef> on unknown distributions
- suite_id = database.get_suite_id(suite)
- if suite_id == -1:
- return None
- component_id = database.get_component_id(component)
- type_id = database.get_override_type_id(type)
-
- # FIXME: nasty non-US speficic hack
- if component.lower().startswith("non-us/"):
- component = component[7:]
-
- q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
- % (package, suite_id, component_id, type_id))
- result = q.getresult()
- # If checking for a source package fall back on the binary override type
- if type == "dsc" and not result:
- deb_type_id = database.get_override_type_id("deb")
- udeb_type_id = database.get_override_type_id("udeb")
- q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
- % (package, suite_id, component_id, deb_type_id, udeb_type_id))
- result = q.getresult()
-
- # Remember the section and priority so we can check them later if appropriate
- if result:
- files[file]["override section"] = result[0][0]
- files[file]["override priority"] = result[0][1]
-
- return result
+ return anyversion
################################################################################
- def reject (self, str, prefix="Rejected: "):
- if str:
- # Unlike other rejects we add new lines first to avoid trailing
- # new lines when this message is passed back up to a caller.
- if self.reject_message:
- self.reject_message += "\n"
- self.reject_message += prefix + str
-
- ################################################################################
+ def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
+ """
+ @type sv_list: list
+ @param sv_list: list of (suite, version) tuples to check
- def get_anyversion(self, query_result, suite):
- anyversion=None
- anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
- for (v, s) in query_result:
- if s in [ x.lower() for x in anysuite ]:
- if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
- anyversion=v
- return anyversion
+ @type filename: string
+ @param filename: XXX
- ################################################################################
+ @type new_version: string
+ @param new_version: XXX
- def cross_suite_version_check(self, query_result, file, new_version):
- """Ensure versions are newer than existing packages in target
+ Ensure versions are newer than existing packages in target
suites and that cross-suite version checking rules as
- set out in the conf file are satisfied."""
+ set out in the conf file are satisfied.
+ """
+
+ cnf = Config()
# Check versions for each target suite
for target_suite in self.pkg.changes["distribution"].keys():
- must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
- must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
+ # Check we can find the target suite
+ ts = get_suite(target_suite)
+ if ts is None:
+ self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
+ continue
+
+ must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
+ must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
+
# Enforce "must be newer than target suite" even if conffile omits it
if target_suite not in must_be_newer_than:
must_be_newer_than.append(target_suite)
- for entry in query_result:
- existent_version = entry[0]
- suite = entry[1]
- if suite in must_be_newer_than and \
- apt_pkg.VersionCompare(new_version, existent_version) < 1:
- self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
- if suite in must_be_older_than and \
- apt_pkg.VersionCompare(new_version, existent_version) > -1:
- ch = self.pkg.changes
+
+ for (suite, existent_version) in sv_list:
+ vercmp = apt_pkg.version_compare(new_version, existent_version)
+
+ if suite in must_be_newer_than and sourceful and vercmp < 1:
+ self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
+
+ if suite in must_be_older_than and vercmp > -1:
cansave = 0
- if ch.get('distribution-version', {}).has_key(suite):
+
+ if self.pkg.changes.get('distribution-version', {}).has_key(suite):
# we really use the other suite, ignoring the conflicting one ...
- addsuite = ch["distribution-version"][suite]
-
- add_version = self.get_anyversion(query_result, addsuite)
- target_version = self.get_anyversion(query_result, target_suite)
-
+ addsuite = self.pkg.changes["distribution-version"][suite]
+
+ add_version = self.get_anyversion(sv_list, addsuite)
+ target_version = self.get_anyversion(sv_list, target_suite)
+
if not add_version:
# not add_version can only happen if we map to a suite
# that doesn't enhance the suite we're propup'ing from.
# than complaining. either way, this isn't a REJECT issue
#
# And - we really should complain to the dorks who configured dak
- self.reject("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite), "Warning: ")
+ self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
self.pkg.changes.setdefault("propdistribution", {})
self.pkg.changes["propdistribution"][addsuite] = 1
cansave = 1
# not targets_version is true when the package is NEW
# we could just stick with the "...old version..." REJECT
# for this, I think.
- self.reject("Won't propogate NEW packages.")
- elif apt_pkg.VersionCompare(new_version, add_version) < 0:
+ self.rejects.append("Won't propogate NEW packages.")
+ elif apt_pkg.version_compare(new_version, add_version) < 0:
# propogation would be redundant. no need to reject though.
- self.reject("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite), "Warning: ")
+ self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
cansave = 1
- elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
- apt_pkg.VersionCompare(add_version, target_version) >= 0:
+ elif apt_pkg.version_compare(new_version, add_version) > 0 and \
+ apt_pkg.version_compare(add_version, target_version) >= 0:
# propogate!!
- self.reject("Propogating upload to %s" % (addsuite), "Warning: ")
+ self.warnings.append("Propogating upload to %s" % (addsuite))
self.pkg.changes.setdefault("propdistribution", {})
self.pkg.changes["propdistribution"][addsuite] = 1
cansave = 1
-
+
if not cansave:
- self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
+ self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
################################################################################
- def check_binary_against_db(self, file):
- self.reject_message = ""
- files = self.pkg.files
-
- # Ensure version is sane
- q = self.projectB.query("""
-SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
- architecture a
- WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
- AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
- % (files[file]["package"],
- files[file]["architecture"]))
- self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
-
- # Check for any existing copies of the file
- q = self.projectB.query("""
-SELECT b.id FROM binaries b, architecture a
- WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
- AND a.id = b.architecture"""
- % (files[file]["package"],
- files[file]["version"],
- files[file]["architecture"]))
- if q.getresult():
- self.reject("%s: can not overwrite existing copy already in the archive." % (file))
-
- return self.reject_message
-
- ################################################################################
+ def accepted_checks(self, overwrite_checks, session):
+        # Recheck anything that relies on the database, since that's not
+ # frozen between accept and our run time when called from p-a.
- def check_source_against_db(self, file):
- self.reject_message = ""
- dsc = self.pkg.dsc
+ # overwrite_checks is set to False when installing to stable/oldstable
- # Ensure version is sane
- q = self.projectB.query("""
-SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
- WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
- self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
+ propogate={}
+ nopropogate={}
- return self.reject_message
+ for checkfile in self.pkg.files.keys():
+            # The .orig.tar.gz can disappear out from under us if it's a
+ # duplicate of one in the archive.
+ if not self.pkg.files.has_key(checkfile):
+ continue
- ################################################################################
+ entry = self.pkg.files[checkfile]
- # **WARNING**
- # NB: this function can remove entries from the 'files' index [if
- # the .orig.tar.gz is a duplicate of the one in the archive]; if
- # you're iterating over 'files' and call this function as part of
- # the loop, be sure to add a check to the top of the loop to
- # ensure you haven't just tried to derefernece the deleted entry.
- # **WARNING**
-
- def check_dsc_against_db(self, file):
- self.reject_message = ""
- files = self.pkg.files
- dsc_files = self.pkg.dsc_files
- legacy_source_untouchable = self.pkg.legacy_source_untouchable
- self.pkg.orig_tar_gz = None
-
- # Try and find all files mentioned in the .dsc. This has
- # to work harder to cope with the multiple possible
- # locations of an .orig.tar.gz.
- # The ordering on the select is needed to pick the newest orig
- # when it exists in multiple places.
- for dsc_file in dsc_files.keys():
- found = None
- if files.has_key(dsc_file):
- actual_md5 = files[dsc_file]["md5sum"]
- actual_size = int(files[dsc_file]["size"])
- found = "%s in incoming" % (dsc_file)
- # Check the file does not already exist in the archive
- q = self.projectB.query("SELECT f.size, f.md5sum, l.path, f.filename FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location ORDER BY f.id DESC" % (dsc_file))
- ql = q.getresult()
- # Strip out anything that isn't '%s' or '/%s$'
- for i in ql:
- if i[3] != dsc_file and i[3][-(len(dsc_file)+1):] != '/'+dsc_file:
- ql.remove(i)
-
- # "[dak] has not broken them. [dak] has fixed a
- # brokenness. Your crappy hack exploited a bug in
- # the old dinstall.
- #
- # "(Come on! I thought it was always obvious that
- # one just doesn't release different files with
- # the same name and version.)"
- # -- ajk@ on d-devel@l.d.o
-
- if ql:
- # Ignore exact matches for .orig.tar.gz
- match = 0
- if dsc_file.endswith(".orig.tar.gz"):
- for i in ql:
- if files.has_key(dsc_file) and \
- int(files[dsc_file]["size"]) == int(i[0]) and \
- files[dsc_file]["md5sum"] == i[1]:
- self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ")
- del files[dsc_file]
- self.pkg.orig_tar_gz = i[2] + i[3]
- match = 1
-
- if not match:
- self.reject("can not overwrite existing copy of '%s' already in the archive." % (dsc_file))
- elif dsc_file.endswith(".orig.tar.gz"):
- # Check in the pool
- q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE f.filename LIKE '%%%s%%' AND l.id = f.location" % (dsc_file))
- ql = q.getresult()
- # Strip out anything that isn't '%s' or '/%s$'
- for i in ql:
- if i[1] != dsc_file and i[1][-(len(dsc_file)+1):] != '/'+dsc_file:
- ql.remove(i)
-
- if ql:
- # Unfortunately, we may get more than one match here if,
- # for example, the package was in potato but had an -sa
- # upload in woody. So we need to choose the right one.
-
- x = ql[0]; # default to something sane in case we don't match any or have only one
-
- if len(ql) > 1:
- for i in ql:
- old_file = i[0] + i[1]
- old_file_fh = utils.open_file(old_file)
- actual_md5 = apt_pkg.md5sum(old_file_fh)
- old_file_fh.close()
- actual_size = os.stat(old_file)[stat.ST_SIZE]
- if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
- x = i
- else:
- legacy_source_untouchable[i[3]] = ""
-
- old_file = x[0] + x[1]
- old_file_fh = utils.open_file(old_file)
- actual_md5 = apt_pkg.md5sum(old_file_fh)
- old_file_fh.close()
- actual_size = os.stat(old_file)[stat.ST_SIZE]
- found = old_file
- suite_type = x[2]
- dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
- # See install() in process-accepted...
- self.pkg.orig_tar_id = x[3]
- self.pkg.orig_tar_gz = old_file
- if suite_type == "legacy" or suite_type == "legacy-mixed":
- self.pkg.orig_tar_location = "legacy"
- else:
- self.pkg.orig_tar_location = x[4]
+            # propagate in the case it is in the override tables:
+ for suite in self.pkg.changes.get("propdistribution", {}).keys():
+ if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
+ propogate[suite] = 1
else:
- # Not there? Check the queue directories...
-
- in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
- # See process_it() in 'dak process-unchecked' for explanation of this
- if os.path.exists(in_unchecked):
- return (self.reject_message, in_unchecked)
- else:
- for dir in [ "Accepted", "New", "Byhand" ]:
- in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
- if os.path.exists(in_otherdir):
- in_otherdir_fh = utils.open_file(in_otherdir)
- actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
- in_otherdir_fh.close()
- actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
- found = in_otherdir
- self.pkg.orig_tar_gz = in_otherdir
-
- if not found:
- self.reject("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_file))
- self.pkg.orig_tar_gz = -1
- continue
- else:
- self.reject("%s refers to %s, but I can't find it in the queue." % (file, dsc_file))
+ nopropogate[suite] = 1
+
+ for suite in propogate.keys():
+ if suite in nopropogate:
continue
- if actual_md5 != dsc_files[dsc_file]["md5sum"]:
- self.reject("md5sum for %s doesn't match %s." % (found, file))
- if actual_size != int(dsc_files[dsc_file]["size"]):
- self.reject("size for %s doesn't match %s." % (found, file))
-
- return (self.reject_message, None)
-
- def do_query(self, q):
- sys.stderr.write("query: \"%s\" ... " % (q))
- before = time.time()
- r = self.projectB.query(q)
- time_diff = time.time()-before
- sys.stderr.write("took %.3f seconds.\n" % (time_diff))
- return r
+ self.pkg.changes["distribution"][suite] = 1
+
+ for checkfile in self.pkg.files.keys():
+ # Check the package is still in the override tables
+ for suite in self.pkg.changes["distribution"].keys():
+ if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
+ self.rejects.append("%s is NEW for %s." % (checkfile, suite))