#!/usr/bin/env python
+# vim:set et sw=4:
-# Queue utility functions for dak
-# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
+"""
+Queue utility functions for dak
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2001 - 2006 James Troup <james@nocrew.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
###############################################################################
-import cPickle, errno, os, pg, re, stat, sys, time
-import apt_inst, apt_pkg
-import utils, database
+import cPickle
+import errno
+import os
+import pg
+import stat
+import sys
+import time
+import apt_inst
+import apt_pkg
+import utils
+import database
+from dak_exceptions import *
+from regexes import re_default_answer, re_fdnic, re_bin_only_nmu
from types import *
###############################################################################
-re_isanum = re.compile (r"^\d+$")
-re_default_answer = re.compile(r"\[(.*)\]")
-re_fdnic = re.compile(r"\n\n")
-re_bin_only_nmu = re.compile(r"\+b\d+$")
-
-################################################################################
-
# Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
+ """
+ Determine what parts in a C{changes} file are NEW.
+
+ @type changes: Upload.Pkg.changes dict
+ @param changes: Changes dictionary
+
+ @type files: Upload.Pkg.files dict
+ @param files: Files dictionary
+
+ @type projectB: pgobject
+ @param projectB: DB handle
+
+ @type warn: bool
+ @param warn: Warn if overrides are added for (old)stable
+
+ @rtype: dict
+ @return: dictionary of NEW packages, keyed by package name.
+
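+ A minimal usage sketch (hypothetical names; assumes a connected
+ C{pg} handle in C{projectB})::
+
+     new = determine_new(u.pkg.changes, u.pkg.files, projectB)
+     for pkg in new.keys():
+         print "%s (%s) is NEW" % (pkg, new[pkg]["component"])
+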
+ """
new = {}
# Build up a list of potentially new things
- for file in files.keys():
- f = files[file]
+ for file_entry in files.keys():
+ f = files[file_entry]
# Skip byhand elements
if f["type"] == "byhand":
continue
pkg = f["package"]
priority = f["priority"]
section = f["section"]
- type = get_type(f)
+ file_type = get_type(f)
component = f["component"]
- if type == "dsc":
+ if file_type == "dsc":
priority = "source"
if not new.has_key(pkg):
new[pkg] = {}
new[pkg]["priority"] = priority
new[pkg]["section"] = section
- new[pkg]["type"] = type
+ new[pkg]["type"] = file_type
new[pkg]["component"] = component
new[pkg]["files"] = []
else:
old_type = new[pkg]["type"]
- if old_type != type:
+ if old_type != file_type:
# source gets trumped by deb or udeb
if old_type == "dsc":
new[pkg]["priority"] = priority
new[pkg]["section"] = section
- new[pkg]["type"] = type
+ new[pkg]["type"] = file_type
new[pkg]["component"] = component
- new[pkg]["files"].append(file)
+ new[pkg]["files"].append(file_entry)
if f.has_key("othercomponents"):
new[pkg]["othercomponents"] = f["othercomponents"]
q = projectB.query("SELECT package FROM override WHERE package = '%s' AND suite = %s AND component = %s AND type = %s" % (pkg, suite_id, component_id, type_id))
ql = q.getresult()
if ql:
- for file in new[pkg]["files"]:
- if files[file].has_key("new"):
- del files[file]["new"]
+ for file_entry in new[pkg]["files"]:
+ if files[file_entry].has_key("new"):
+ del files[file_entry]["new"]
del new[pkg]
if warn:
################################################################################
-def get_type(f):
+def get_type(file):
+ """
+ Get the file type of C{file}
+
+ @type file: dict
+ @param file: file entry
+
+ @rtype: string
+ @return: filetype
+
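+ Sketch of the mapping (override-type validation additionally
+ requires an initialized C{database} module)::
+
+     get_type({"type": "diff.gz"})                  # -> "dsc"
+     get_type({"dbtype": "udeb", "type": "udeb"})   # -> "udeb"
+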
+ """
# Determine the type
- if f.has_key("dbtype"):
- type = f["dbtype"]
- elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
- type = "dsc"
+ if file.has_key("dbtype"):
+ file_type = file["dbtype"]
+ elif file["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+ file_type = "dsc"
else:
- utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (type))
+ utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
# Validate the override type
- type_id = database.get_override_type_id(type)
+ type_id = database.get_override_type_id(file_type)
if type_id == -1:
- utils.fubar("invalid type (%s) for new. Say wha?" % (type))
+ utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
- return type
+ return file_type
################################################################################
-# check if section/priority values are valid
+
def check_valid(new):
+ """
+ Check if section and priority for NEW packages exist in the database.
+ Additionally performs sanity checks:
+ - debian-installer packages have to be udeb (or source)
+ - non-debian-installer packages cannot be udeb
+ - source priority can only be assigned to dsc file types
+
+ @type new: dict
+ @param new: Dict of new packages with their section, priority and type.
+
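+ Invalid values are flagged by setting the corresponding id to -1, so
+ callers can test for them afterwards (sketch)::
+
+     check_valid(new)
+     for pkg in new.keys():
+         if new[pkg]["section id"] == -1 or new[pkg]["priority id"] == -1:
+             print "%s: invalid section or priority" % (pkg)
+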
+ """
for pkg in new.keys():
section = new[pkg]["section"]
priority = new[pkg]["priority"]
- type = new[pkg]["type"]
+ file_type = new[pkg]["type"]
new[pkg]["section id"] = database.get_section_id(section)
new[pkg]["priority id"] = database.get_priority_id(new[pkg]["priority"])
# Sanity checks
di = section.find("debian-installer") != -1
- if (di and type != "udeb") or (not di and type == "udeb"):
+ if (di and file_type not in ("udeb", "dsc")) or (not di and file_type == "udeb"):
new[pkg]["section id"] = -1
- if (priority == "source" and type != "dsc") or \
- (priority != "source" and type == "dsc"):
+ if (priority == "source" and file_type != "dsc") or \
+ (priority != "source" and file_type == "dsc"):
new[pkg]["priority id"] = -1
###############################################################################
-# Convenience wrapper to carry around all the package information in
-
class Pkg:
+ """ Convenience wrapper to carry around all the package information """
def __init__(self, **kwds):
self.__dict__.update(kwds)
###############################################################################
class Upload:
+ """
+ Everything that has to do with processing an upload.
+ """
def __init__(self, Cnf):
+ """
+ Initialize various variables and the global substitution template mappings.
+ Also connect to the DB and initialize the Database module.
+
+ """
self.Cnf = Cnf
self.accept_count = 0
self.accept_bytes = 0L
- self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
- legacy_source_untouchable = {})
+ self.reject_message = ""
+ self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {})
# Initialize the substitution template mapping global
Subst = self.Subst = {}
###########################################################################
def init_vars (self):
- for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
- exec "self.pkg.%s.clear();" % (i)
+ """ Reset a number of entries from our Pkg object. """
+ self.pkg.changes.clear()
+ self.pkg.dsc.clear()
+ self.pkg.files.clear()
+ self.pkg.dsc_files.clear()
self.pkg.orig_tar_id = None
self.pkg.orig_tar_location = ""
self.pkg.orig_tar_gz = None
###########################################################################
def update_vars (self):
+ """
+ Update our Pkg object by reading a previously created cPickle .dak dumpfile.
+ """
dump_filename = self.pkg.changes_file[:-8]+".dak"
dump_file = utils.open_file(dump_filename)
p = cPickle.Unpickler(dump_file)
- for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
- exec "self.pkg.%s.update(p.load());" % (i)
- for i in [ "orig_tar_id", "orig_tar_location" ]:
- exec "self.pkg.%s = p.load();" % (i)
+
+ self.pkg.changes.update(p.load())
+ self.pkg.dsc.update(p.load())
+ self.pkg.files.update(p.load())
+ self.pkg.dsc_files.update(p.load())
+
+ self.pkg.orig_tar_id = p.load()
+ self.pkg.orig_tar_location = p.load()
+
dump_file.close()
###########################################################################
- # This could just dump the dictionaries as is, but I'd like to
- # avoid this so there's some idea of what process-accepted &
- # process-new use from process-unchecked
def dump_vars(self, dest_dir):
- for i in [ "changes", "dsc", "files", "dsc_files",
- "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
- exec "%s = self.pkg.%s;" % (i,i)
+ """
+ Dump our Pkg object into a cPickle file.
+
+ @type dest_dir: string
+ @param dest_dir: Path where the dumpfile should be stored
+
+ @note: This could just dump the dictionaries as is, but I'd like to avoid this so
+ there's some idea of what process-accepted & process-new use from
+ process-unchecked. (JT)
+
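+ Typical use (sketch; any queue directory works as the destination)::
+
+     u.dump_vars(Cnf["Dir::Queue::New"])
+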
+ """
+
+ changes = self.pkg.changes
+ dsc = self.pkg.dsc
+ files = self.pkg.files
+ dsc_files = self.pkg.dsc_files
+ orig_tar_id = self.pkg.orig_tar_id
+ orig_tar_location = self.pkg.orig_tar_location
+
dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
dump_file = utils.open_file(dump_filename, 'w')
try:
- os.chmod(dump_filename, 0660)
+ os.chmod(dump_filename, 0664)
except OSError, e:
+ # chmod may fail when the dumpfile is not owned by the user
+ # invoking dak (e.g. when NEW is processed by a member of
+ # the ftpteam)
if errno.errorcode[e.errno] == 'EPERM':
perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
- if perms & stat.S_IROTH:
- utils.fubar("%s is world readable and chmod failed." % (dump_filename))
+ # security precaution, should never happen unless a weird
+ # umask is set anywhere
+ if perms & stat.S_IWOTH:
+ utils.fubar("%s is world writable and chmod failed." % \
+ (dump_filename,))
+ # ignore the failed chmod otherwise as the file should
+ # already have the right privileges and is just, at worst,
+ # unreadable for world
else:
raise
p = cPickle.Pickler(dump_file, 1)
- for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
- exec "%s = {}" % i
+ d_changes = {}
+ d_dsc = {}
+ d_files = {}
+ d_dsc_files = {}
+
## files
- for file in files.keys():
- d_files[file] = {}
+ for file_entry in files.keys():
+ d_files[file_entry] = {}
for i in [ "package", "version", "architecture", "type", "size",
- "md5sum", "component", "location id", "source package",
- "source version", "maintainer", "dbtype", "files id",
- "new", "section", "priority", "othercomponents",
+ "md5sum", "sha1sum", "sha256sum", "component",
+ "location id", "source package", "source version",
+ "maintainer", "dbtype", "files id", "new",
+ "section", "priority", "othercomponents",
"pool name", "original component" ]:
- if files[file].has_key(i):
- d_files[file][i] = files[file][i]
+ if files[file_entry].has_key(i):
+ d_files[file_entry][i] = files[file_entry][i]
## changes
# Mandatory changes fields
for i in [ "distribution", "source", "architecture", "version",
if dsc.has_key(i):
d_dsc[i] = dsc[i]
## dsc_files
- for file in dsc_files.keys():
- d_dsc_files[file] = {}
+ for file_entry in dsc_files.keys():
+ d_dsc_files[file_entry] = {}
# Mandatory dsc_files fields
for i in [ "size", "md5sum" ]:
- d_dsc_files[file][i] = dsc_files[file][i]
+ d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
# Optional dsc_files fields
for i in [ "files id" ]:
- if dsc_files[file].has_key(i):
- d_dsc_files[file][i] = dsc_files[file][i]
+ if dsc_files[file_entry].has_key(i):
+ d_dsc_files[file_entry][i] = dsc_files[file_entry][i]
for i in [ d_changes, d_dsc, d_files, d_dsc_files,
- legacy_source_untouchable, orig_tar_id, orig_tar_location ]:
+ orig_tar_id, orig_tar_location ]:
p.dump(i)
dump_file.close()
- # Set up the per-package template substitution mappings
def update_subst (self, reject_message = ""):
+ """ Set up the per-package template substitution mappings """
+
Subst = self.Subst
changes = self.pkg.changes
# If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
###########################################################################
def build_summaries(self):
+ """ Build a summary of changes the upload introduces. """
changes = self.pkg.changes
files = self.pkg.files
if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
changes["distribution"] = {}
- override_summary ="";
+ override_summary = ""
file_keys = files.keys()
file_keys.sort()
- for file in file_keys:
- if files[file].has_key("byhand"):
+ for file_entry in file_keys:
+ if files[file_entry].has_key("byhand"):
byhand = 1
- summary += file + " byhand\n"
- elif files[file].has_key("new"):
+ summary += file_entry + " byhand\n"
+ elif files[file_entry].has_key("new"):
new = 1
- summary += "(new) %s %s %s\n" % (file, files[file]["priority"], files[file]["section"])
- if files[file].has_key("othercomponents"):
- summary += "WARNING: Already present in %s distribution.\n" % (files[file]["othercomponents"])
- if files[file]["type"] == "deb":
- deb_fh = utils.open_file(file)
+ summary += "(new) %s %s %s\n" % (file_entry, files[file_entry]["priority"], files[file_entry]["section"])
+ if files[file_entry].has_key("othercomponents"):
+ summary += "WARNING: Already present in %s distribution.\n" % (files[file_entry]["othercomponents"])
+ if files[file_entry]["type"] == "deb":
+ deb_fh = utils.open_file(file_entry)
summary += apt_pkg.ParseSection(apt_inst.debExtractControl(deb_fh))["Description"] + '\n'
deb_fh.close()
else:
- files[file]["pool name"] = utils.poolify (changes.get("source",""), files[file]["component"])
- destination = self.Cnf["Dir::PoolRoot"] + files[file]["pool name"] + file
- summary += file + "\n to " + destination + "\n"
- if not files[file].has_key("type"):
- files[file]["type"] = "unknown"
- if files[file]["type"] in ["deb", "udeb", "dsc"]:
+ files[file_entry]["pool name"] = utils.poolify (changes.get("source",""), files[file_entry]["component"])
+ destination = self.Cnf["Dir::PoolRoot"] + files[file_entry]["pool name"] + file_entry
+ summary += file_entry + "\n to " + destination + "\n"
+ if not files[file_entry].has_key("type"):
+ files[file_entry]["type"] = "unknown"
+ if files[file_entry]["type"] in ["deb", "udeb", "dsc"]:
# (queue/unchecked), there we have override entries already, use them
# (process-new), there we don't have override entries, use the newly generated ones.
- override_prio = files[file].get("override priority", files[file]["priority"])
- override_sect = files[file].get("override section", files[file]["section"])
- override_summary += "%s - %s %s\n" % (file, override_prio, override_sect)
+ override_prio = files[file_entry].get("override priority", files[file_entry]["priority"])
+ override_sect = files[file_entry].get("override section", files[file_entry]["section"])
+ override_summary += "%s - %s %s\n" % (file_entry, override_prio, override_sect)
short_summary = summary
###########################################################################
def close_bugs (self, summary, action):
+ """
+ Send mail to close bugs as instructed by the closes field in the changes file.
+ Also add a line to summary if any work was done.
+
+ @type summary: string
+ @param summary: summary text, as given by L{build_summaries}
+
+ @type action: bool
+ @param action: If set to false, no real action will be done.
+
+ @rtype: string
+ @return: summary. If action was taken, extended by the list of closed bugs.
+
+ """
changes = self.pkg.changes
Subst = self.Subst
Cnf = self.Cnf
###########################################################################
def announce (self, short_summary, action):
+ """
+ Send an announcement mail about a new upload.
+
+ @type short_summary: string
+ @param short_summary: Short summary text to include in the mail
+
+ @type action: bool
+ @param action: If set to false, no real action will be done.
+
+ @rtype: string
+ @return: Text string describing the action taken.
+
+ """
Subst = self.Subst
Cnf = self.Cnf
changes = self.pkg.changes
Subst["__SHORT_SUMMARY__"] = short_summary
for dist in changes["distribution"].keys():
- list = Cnf.Find("Suite::%s::Announce" % (dist))
- if list == "" or lists_done.has_key(list):
+ announce_list = Cnf.Find("Suite::%s::Announce" % (dist))
+ if announce_list == "" or lists_done.has_key(announce_list):
continue
- lists_done[list] = 1
- summary += "Announcing to %s\n" % (list)
+ lists_done[announce_list] = 1
+ summary += "Announcing to %s\n" % (announce_list)
if action:
- Subst["__ANNOUNCE_LIST_ADDRESS__"] = list
+ Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
if Cnf.get("Dinstall::TrackingServer") and changes["architecture"].has_key("source"):
Subst["__ANNOUNCE_LIST_ADDRESS__"] = Subst["__ANNOUNCE_LIST_ADDRESS__"] + "\nBcc: %s@%s" % (changes["source"], Cnf["Dinstall::TrackingServer"])
mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-unchecked.announce")
###########################################################################
def accept (self, summary, short_summary):
+ """
+ Accept an upload.
+
+ This moves all files referenced from the .changes into the I{accepted}
+ queue, sends the accepted mail, announces to lists, closes bugs and
+ also checks for override disparities. If enabled it will write out
+ the version history for the BTS Version Tracking and will finally call
+ L{queue_build}.
+
+ @type summary: string
+ @param summary: Summary text
+
+ @type short_summary: string
+ @param short_summary: Short summary
+
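+ Typical call sequence (sketch; assumes L{build_summaries} returns
+ the pair used here)::
+
+     (summary, short_summary) = u.build_summaries()
+     u.accept(summary, short_summary)
+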
+ """
+
Cnf = self.Cnf
Subst = self.Subst
files = self.pkg.files
# Move all the files into the accepted directory
utils.move(changes_file, Cnf["Dir::Queue::Accepted"])
file_keys = files.keys()
- for file in file_keys:
- utils.move(file, Cnf["Dir::Queue::Accepted"])
- self.accept_bytes += float(files[file]["size"])
+ for file_entry in file_keys:
+ utils.move(file_entry, Cnf["Dir::Queue::Accepted"])
+ self.accept_bytes += float(files[file_entry]["size"])
self.accept_count += 1
# Send accept mail, announce to lists, close bugs and check for
if changes["architecture"].has_key("source") and \
dsc.has_key("bts changelog"):
- temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
- dotprefix=1, perms=0644)
- version_history = utils.open_file(temp_filename, 'w')
+ (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
+ version_history = os.fdopen(fd, 'w')
version_history.write(dsc["bts changelog"])
version_history.close()
filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
changes_file[:-8]+".versions")
os.rename(temp_filename, filename)
+ os.chmod(filename, 0644)
# Write out the binary -> source mapping.
- temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
- dotprefix=1, perms=0644)
- debinfo = utils.open_file(temp_filename, 'w')
- for file in file_keys:
- f = files[file]
+ (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
+ debinfo = os.fdopen(fd, 'w')
+ for file_entry in file_keys:
+ f = files[file_entry]
if f["type"] == "deb":
line = " ".join([f["package"], f["version"],
f["architecture"], f["source package"],
filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
changes_file[:-8]+".debinfo")
os.rename(temp_filename, filename)
+ os.chmod(filename, 0644)
self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
###########################################################################
def queue_build (self, queue, path):
+ """
+ Prepare the queue_build database table used for incoming autobuild support.
+
+ @type queue: string
+ @param queue: queue name
+
+ @type path: string
+ @param path: path for the queue file entries/link destinations
+ """
+
Cnf = self.Cnf
Subst = self.Subst
files = self.pkg.files
dest_dir = Cnf["Dir::QueueBuild"]
if Cnf.FindB("Dinstall::SecurityQueueBuild"):
dest_dir = os.path.join(dest_dir, suite)
- for file in file_keys:
- src = os.path.join(path, file)
- dest = os.path.join(dest_dir, file)
+ for file_entry in file_keys:
+ src = os.path.join(path, file_entry)
+ dest = os.path.join(dest_dir, file_entry)
if Cnf.FindB("Dinstall::SecurityQueueBuild"):
# Copy it since the original won't be readable by www-data
utils.copy(src, dest)
###########################################################################
def check_override (self):
+ """
+ Checks override entries for validity. Mails "Override disparity" warnings,
+ if that feature is enabled.
+
+ Abandons the check if
+ - this is a non-sourceful upload
+ - override disparity checks are disabled
+ - mail sending is disabled
+
+ """
Subst = self.Subst
changes = self.pkg.changes
files = self.pkg.files
summary = ""
file_keys = files.keys()
file_keys.sort()
- for file in file_keys:
- if not files[file].has_key("new") and files[file]["type"] == "deb":
- section = files[file]["section"]
- override_section = files[file]["override section"]
+ for file_entry in file_keys:
+ if not files[file_entry].has_key("new") and files[file_entry]["type"] == "deb":
+ section = files[file_entry]["section"]
+ override_section = files[file_entry]["override section"]
if section.lower() != override_section.lower() and section != "-":
- summary += "%s: package says section is %s, override says %s.\n" % (file, section, override_section)
- priority = files[file]["priority"]
- override_priority = files[file]["override priority"]
+ summary += "%s: package says section is %s, override says %s.\n" % (file_entry, section, override_section)
+ priority = files[file_entry]["priority"]
+ override_priority = files[file_entry]["override priority"]
if priority != override_priority and priority != "-":
- summary += "%s: package says priority is %s, override says %s.\n" % (file, priority, override_priority)
+ summary += "%s: package says priority is %s, override says %s.\n" % (file_entry, priority, override_priority)
if summary == "":
return
###########################################################################
def force_reject (self, files):
- """Forcefully move files from the current directory to the
- reject directory. If any file already exists in the reject
- directory it will be moved to the morgue to make way for
- the new file."""
+ """
+ Forcefully move files from the current directory to the
+ reject directory. If any file already exists in the reject
+ directory it will be moved to the morgue to make way for
+ the new file.
+
+ @type files: dict
+ @param files: file dictionary
+
+ """
Cnf = self.Cnf
- for file in files:
+ for file_entry in files:
# Skip any files which don't exist or which we don't have permission to copy.
- if os.access(file,os.R_OK) == 0:
+ if os.access(file_entry,os.R_OK) == 0:
continue
- dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file)
+ dest_file = os.path.join(Cnf["Dir::Queue::Reject"], file_entry)
try:
dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
except OSError, e:
# File exists? Let's try and move it to the morgue
if errno.errorcode[e.errno] == 'EEXIST':
- morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file)
+ morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
try:
morgue_file = utils.find_next_free(morgue_file)
- except utils.tried_too_hard_exc:
+ except NoFreeFilenameError:
# Something's either gone badly Pete Tong, or
# someone is trying to exploit us.
- utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file))
+ utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
return
utils.move(dest_file, morgue_file, perms=0660)
try:
dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
except OSError, e:
# Likewise
- utils.warn("**WARNING** failed to claim %s in the reject directory." % (file))
+ utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
return
else:
raise
# If we got here, we own the destination file, so we can
# safely overwrite it.
- utils.move(file, dest_file, 1, perms=0660)
+ utils.move(file_entry, dest_file, 1, perms=0660)
os.close(dest_fd)
###########################################################################
- def do_reject (self, manual = 0, reject_message = ""):
+ def do_reject (self, manual = 0, reject_message = "", note = ""):
+ """
+ Reject an upload. If C{manual} is true and no C{reject_message} is
+ given, spawn an editor so the user can write one, prefilled with
+ C{note}.
+
+ @type manual: bool
+ @param manual: manual or automated rejection
+
+ @type reject_message: string
+ @param reject_message: A reject message
+
+ @type note: string
+ @param note: optional note to prefill the rejection editor with
+
+ @return: 0
+
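+ Manual rejection sketch (the empty message makes it spawn the editor)::
+
+     u.do_reject(manual=1, reject_message="")
+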
+ """
# If we weren't given a manual rejection message, spawn an
# editor so the user can add one in...
if manual and not reject_message:
- temp_filename = utils.temp_filename()
+ (fd, temp_filename) = utils.temp_filename()
+ temp_file = os.fdopen(fd, 'w')
+ if len(note) > 0:
+ for line in note:
+ temp_file.write(line)
+ temp_file.close()
editor = os.environ.get("EDITOR","vi")
answer = 'E'
while answer == 'E':
################################################################################
- # Ensure that source exists somewhere in the archive for the binary
- # upload being processed.
- #
- # (1) exact match => 1.0-3
- # (2) Bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
-
def source_exists (self, package, source_version, suites = ["any"]):
- okay = 1
- for suite in suites:
- if suite == "any":
- que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
- (package)
- else:
- # source must exist in suite X, or in some other suite that's
- # mapped to X, recursively... silent-maps are counted too,
- # unreleased-maps aren't.
- maps = self.Cnf.ValueList("SuiteMappings")[:]
- maps.reverse()
- maps = [ m.split() for m in maps ]
- maps = [ (x[1], x[2]) for x in maps
- if x[0] == "map" or x[0] == "silent-map" ]
- s = [suite]
- for x in maps:
- if x[1] in s and x[0] not in s:
- s.append(x[0])
-
- que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
+ """
+ Ensure that source exists somewhere in the archive for the binary
+ upload being processed.
+ 1. exact match => 1.0-3
+ 2. bin-only NMU => 1.0-3+b1 , 1.0-3.1+b1
+
+ @type package: string
+ @param package: package source name
+
+ @type source_version: string
+ @param source_version: expected source version
+
+ @type suites: list
+ @param suites: list of suites to check in, default I{any}
+
+ @rtype: int
+ @return: 1 if a source with the expected version is found, otherwise 0
+
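+ E.g. when checking a binary-only upload (sketch; hypothetical values,
+ with any C{+bN} suffix already stripped via re_bin_only_nmu)::
+
+     if not self.source_exists("hello", "2.2-1", suites=["unstable"]):
+         self.reject("no source found for hello 2.2-1")
+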
+ """
+ okay = 1
+ for suite in suites:
+ if suite == "any":
+ que = "SELECT s.version FROM source s WHERE s.source = '%s'" % \
+ (package)
+ else:
+ # source must exist in suite X, or in some other suite that's
+ # mapped to X, recursively... silent-maps are counted too,
+ # unreleased-maps aren't.
+ maps = self.Cnf.ValueList("SuiteMappings")[:]
+ maps.reverse()
+ maps = [ m.split() for m in maps ]
+ maps = [ (x[1], x[2]) for x in maps
+ if x[0] == "map" or x[0] == "silent-map" ]
+ s = [suite]
+ for x in maps:
+ if x[1] in s and x[0] not in s:
+ s.append(x[0])
+
+ que = "SELECT s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) JOIN suite su ON (sa.suite = su.id) WHERE s.source = '%s' AND (%s)" % (package, " OR ".join(["su.suite_name = '%s'" % a for a in s]))
q = self.projectB.query(que)
# Reduce the query results to a list of version numbers
# No source found...
okay = 0
- break
- return okay
+ break
+ return okay
################################################################################
-
+
def in_override_p (self, package, component, suite, binary_type, file):
+ """
+ Check if a package already has override entries in the DB
+
+ @type package: string
+ @param package: package name
+
+ @type component: string
+ @param component: component name; its database id is looked up via L{database.get_component_id}
+
+ @type suite: string
+ @param suite: suite name; its database id is looked up via L{database.get_suite_id}
+
+ @type binary_type: string
+ @param binary_type: type of the package
+
+ @type file: string
+ @param file: filename we check
+
+ @return: the database result. But no one cares anyway.
+
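+ Lookup sketch (hypothetical values; an empty result means the
+ package is NEW)::
+
+     result = self.in_override_p("hello", "main", "unstable", "deb",
+                                 "hello_2.2-1_i386.deb")
+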
+ """
files = self.pkg.files
if binary_type == "": # must be source
- type = "dsc"
+ file_type = "dsc"
else:
- type = binary_type
+ file_type = binary_type
# Override suite name; used for example with proposed-updates
if self.Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
if suite_id == -1:
return None
component_id = database.get_component_id(component)
- type_id = database.get_override_type_id(type)
+ type_id = database.get_override_type_id(file_type)
q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
% (package, suite_id, component_id, type_id))
result = q.getresult()
# If checking for a source package fall back on the binary override type
- if type == "dsc" and not result:
+ if file_type == "dsc" and not result:
deb_type_id = database.get_override_type_id("deb")
udeb_type_id = database.get_override_type_id("udeb")
q = self.projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND (type = %s OR type = %s) AND o.section = s.id AND o.priority = p.id"
################################################################################
def reject (self, str, prefix="Rejected: "):
+ """
+ Add C{str} to the reject message. Adds C{prefix}, by default "Rejected: ".
+
+ @type str: string
+ @param str: Reject text
+
+ @type prefix: string
+ @param prefix: Prefix text, default Rejected:
+
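+ E.g. (hypothetical C{filename})::
+
+     self.reject("%s: md5sum check failed" % (filename))
+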
+ """
if str:
# Unlike other rejects we add new lines first to avoid trailing
# new lines when this message is passed back up to a caller.
################################################################################
def get_anyversion(self, query_result, suite):
+ """ """
anyversion=None
anysuite = [suite] + self.Cnf.ValueList("Suite::%s::VersionChecks::Enhances" % (suite))
for (v, s) in query_result:
################################################################################
- def cross_suite_version_check(self, query_result, file, new_version):
- """Ensure versions are newer than existing packages in target
+ def cross_suite_version_check(self, query_result, file, new_version,
+ sourceful=False):
+ """
+ Ensure versions are newer than existing packages in target
suites and that cross-suite version checking rules as
- set out in the conf file are satisfied."""
+ set out in the conf file are satisfied. The "must be newer than
+ target suite" check is only enforced for sourceful uploads
+ (C{sourceful=True}).
+
+ """
# Check versions for each target suite
for target_suite in self.pkg.changes["distribution"].keys():
- must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
- must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
+ must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
+ must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
# Enforce "must be newer than target suite" even if conffile omits it
if target_suite not in must_be_newer_than:
must_be_newer_than.append(target_suite)
for entry in query_result:
existent_version = entry[0]
suite = entry[1]
- if suite in must_be_newer_than and \
+ if suite in must_be_newer_than and sourceful and \
apt_pkg.VersionCompare(new_version, existent_version) < 1:
self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
if suite in must_be_older_than and \
ch = self.pkg.changes
cansave = 0
if ch.get('distribution-version', {}).has_key(suite):
- # we really use the other suite, ignoring the conflicting one ...
+ # we really use the other suite, ignoring the conflicting one ...
addsuite = ch["distribution-version"][suite]
-
+
add_version = self.get_anyversion(query_result, addsuite)
target_version = self.get_anyversion(query_result, target_suite)
-
+
if not add_version:
# not add_version can only happen if we map to a suite
# that doesn't enhance the suite we're propup'ing from.
self.pkg.changes.setdefault("propdistribution", {})
self.pkg.changes["propdistribution"][addsuite] = 1
cansave = 1
-
+
if not cansave:
self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
################################################################################
def check_binary_against_db(self, file):
+ """
+
+ """
self.reject_message = ""
files = self.pkg.files
AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
% (files[file]["package"],
files[file]["architecture"]))
- self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
+ self.cross_suite_version_check(q.getresult(), file,
+ files[file]["version"], sourceful=False)
# Check for any existing copies of the file
q = self.projectB.query("""
################################################################################
def check_source_against_db(self, file):
+ """
+ """
self.reject_message = ""
dsc = self.pkg.dsc
q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
- self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
+ self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
+ sourceful=True)
return self.reject_message
################################################################################
- # **WARNING**
- # NB: this function can remove entries from the 'files' index [if
- # the .orig.tar.gz is a duplicate of the one in the archive]; if
- # you're iterating over 'files' and call this function as part of
- # the loop, be sure to add a check to the top of the loop to
- # ensure you haven't just tried to dereference the deleted entry.
- # **WARNING**
def check_dsc_against_db(self, file):
+ """
+
+ @warning: NB: this function can remove entries from the 'files' index [if
+ the .orig.tar.gz is a duplicate of the one in the archive]; if
+ you're iterating over 'files' and call this function as part of
+ the loop, be sure to add a check to the top of the loop to
+ ensure you haven't just tried to dereference the deleted entry.
+
+ """
self.reject_message = ""
files = self.pkg.files
dsc_files = self.pkg.dsc_files
- legacy_source_untouchable = self.pkg.legacy_source_untouchable
self.pkg.orig_tar_gz = None
# Try and find all files mentioned in the .dsc. This has
# for example, the package was in potato but had an -sa
# upload in woody. So we need to choose the right one.
- x = ql[0]; # default to something sane in case we don't match any or have only one
+ # default to something sane in case we don't match any or have only one
+ x = ql[0]
if len(ql) > 1:
for i in ql:
actual_size = os.stat(old_file)[stat.ST_SIZE]
if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
x = i
- else:
- legacy_source_untouchable[i[3]] = ""
old_file = x[0] + x[1]
old_file_fh = utils.open_file(old_file)
actual_size = os.stat(old_file)[stat.ST_SIZE]
found = old_file
suite_type = x[2]
- dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
+ # need this for updating dsc_files in install()
+ dsc_files[dsc_file]["files id"] = x[3]
# See install() in process-accepted...
self.pkg.orig_tar_id = x[3]
self.pkg.orig_tar_gz = old_file
- if suite_type == "legacy" or suite_type == "legacy-mixed":
- self.pkg.orig_tar_location = "legacy"
- else:
- self.pkg.orig_tar_location = x[4]
+ self.pkg.orig_tar_location = x[4]
else:
# Not there? Check the queue directories...
in_unchecked = os.path.join(self.Cnf["Dir::Queue::Unchecked"],dsc_file)
# See process_it() in 'dak process-unchecked' for explanation of this
- # in_unchecked check dropped by ajt 2007-08-28, how did that
- # ever make sense?
+ # in_unchecked check dropped by ajt 2007-08-28, how did that
+ # ever make sense?
if os.path.exists(in_unchecked) and False:
return (self.reject_message, in_unchecked)
else:
- for dir in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
- in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (dir)],dsc_file)
+ for directory in [ "Accepted", "New", "Byhand", "ProposedUpdates", "OldProposedUpdates" ]:
+ in_otherdir = os.path.join(self.Cnf["Dir::Queue::%s" % (directory)],dsc_file)
if os.path.exists(in_otherdir):
in_otherdir_fh = utils.open_file(in_otherdir)
actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
return (self.reject_message, None)
- def do_query(self, q):
- sys.stderr.write("query: \"%s\" ... " % (q))
+ def do_query(self, query):
+ """
+ Executes a database query. Writes statistics / timing to stderr.
+
+ @type query: string
+ @param query: database query string, passed unmodified
+
+ @return: db result
+
+ @warning: The query is passed B{unmodified}, so be careful what you use this for.
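+
+ Debugging sketch (assumes a connected C{self.projectB})::
+
+     r = self.do_query("SELECT suite_name FROM suite")
+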
+ """
+ sys.stderr.write("query: \"%s\" ... " % (query))
before = time.time()
- r = self.projectB.query(q)
+ r = self.projectB.query(query)
time_diff = time.time()-before
sys.stderr.write("took %.3f seconds.\n" % (time_diff))
return r