#!/usr/bin/env python
+# vim:set et sw=4:
# Queue utility functions for dak
# Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
import cPickle, errno, os, pg, re, stat, sys, time
import apt_inst, apt_pkg
import utils, database
+from dak_exceptions import *
+from regexes import re_default_answer, re_fdnic, re_bin_only_nmu
from types import *
###############################################################################
-re_isanum = re.compile (r"^\d+$")
-re_default_answer = re.compile(r"\[(.*)\]")
-re_fdnic = re.compile(r"\n\n")
-re_bin_only_nmu = re.compile(r"\+b\d+$")
-
-################################################################################
-
# Determine what parts in a .changes are NEW
def determine_new(changes, files, projectB, warn=1):
new[pkg]["priority id"] = database.get_priority_id(new[pkg]["priority"])
# Sanity checks
di = section.find("debian-installer") != -1
- if (di and file_type != "udeb") or (not di and file_type == "udeb"):
+ if (di and file_type not in ("udeb", "dsc")) or (not di and file_type == "udeb"):
new[pkg]["section id"] = -1
if (priority == "source" and file_type != "dsc") or \
(priority != "source" and file_type == "dsc"):
self.Cnf = Cnf
self.accept_count = 0
self.accept_bytes = 0L
+ self.reject_message = ""
self.pkg = Pkg(changes = {}, dsc = {}, dsc_files = {}, files = {},
legacy_source_untouchable = {})
###########################################################################
def init_vars (self):
- for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
- exec "self.pkg.%s.clear();" % (i)
+ self.pkg.changes.clear()
+ self.pkg.dsc.clear()
+ self.pkg.files.clear()
+ self.pkg.dsc_files.clear()
+ self.pkg.legacy_source_untouchable.clear()
self.pkg.orig_tar_id = None
self.pkg.orig_tar_location = ""
self.pkg.orig_tar_gz = None
dump_filename = self.pkg.changes_file[:-8]+".dak"
dump_file = utils.open_file(dump_filename)
p = cPickle.Unpickler(dump_file)
- for i in [ "changes", "dsc", "files", "dsc_files", "legacy_source_untouchable" ]:
- exec "self.pkg.%s.update(p.load());" % (i)
- for i in [ "orig_tar_id", "orig_tar_location" ]:
- exec "self.pkg.%s = p.load();" % (i)
+
+ self.pkg.changes.update(p.load())
+ self.pkg.dsc.update(p.load())
+ self.pkg.files.update(p.load())
+ self.pkg.dsc_files.update(p.load())
+ self.pkg.legacy_source_untouchable.update(p.load())
+
+ self.pkg.orig_tar_id = p.load()
+ self.pkg.orig_tar_location = p.load()
+
dump_file.close()
###########################################################################
# process-new use from process-unchecked
def dump_vars(self, dest_dir):
- for i in [ "changes", "dsc", "files", "dsc_files",
- "legacy_source_untouchable", "orig_tar_id", "orig_tar_location" ]:
- exec "%s = self.pkg.%s;" % (i,i)
+
+ changes = self.pkg.changes
+ dsc = self.pkg.dsc
+ files = self.pkg.files
+ dsc_files = self.pkg.dsc_files
+ legacy_source_untouchable = self.pkg.legacy_source_untouchable
+ orig_tar_id = self.pkg.orig_tar_id
+ orig_tar_location = self.pkg.orig_tar_location
+
dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".dak")
dump_file = utils.open_file(dump_filename, 'w')
try:
- os.chmod(dump_filename, 0660)
+ os.chmod(dump_filename, 0664)
except OSError, e:
+                # chmod may fail when the dumpfile is not owned by the user
+                # invoking dak (e.g. when NEW is processed by a member of
+                # the ftpteam)
if errno.errorcode[e.errno] == 'EPERM':
perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE])
- if perms & stat.S_IROTH:
- utils.fubar("%s is world readable and chmod failed." % (dump_filename))
+                # Security precaution: this should never happen unless a
+                # weird umask is set somewhere.
+ if perms & stat.S_IWOTH:
+ utils.fubar("%s is world writable and chmod failed." % \
+ (dump_filename,))
+                # Otherwise ignore the failed chmod: the file should already
+                # have the right permissions and is, at worst, unreadable
+                # to the world.
else:
raise
p = cPickle.Pickler(dump_file, 1)
- for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
- exec "%s = {}" % i
+ d_changes = {}
+ d_dsc = {}
+ d_files = {}
+ d_dsc_files = {}
+
## files
for file_entry in files.keys():
d_files[file_entry] = {}
for i in [ "package", "version", "architecture", "type", "size",
- "md5sum", "component", "location id", "source package",
- "source version", "maintainer", "dbtype", "files id",
- "new", "section", "priority", "othercomponents",
+ "md5sum", "sha1sum", "sha256sum", "component",
+ "location id", "source package", "source version",
+ "maintainer", "dbtype", "files id", "new",
+ "section", "priority", "othercomponents",
"pool name", "original component" ]:
if files[file_entry].has_key(i):
d_files[file_entry][i] = files[file_entry][i]
if not changes.has_key("distribution") or not isinstance(changes["distribution"], DictType):
changes["distribution"] = {}
- override_summary ="";
+ override_summary =""
file_keys = files.keys()
file_keys.sort()
for file_entry in file_keys:
if changes["architecture"].has_key("source") and \
dsc.has_key("bts changelog"):
- temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
- dotprefix=1, perms=0644)
- version_history = utils.open_file(temp_filename, 'w')
+ (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
+ version_history = os.fdopen(fd, 'w')
version_history.write(dsc["bts changelog"])
version_history.close()
filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
changes_file[:-8]+".versions")
os.rename(temp_filename, filename)
+ os.chmod(filename, 0644)
# Write out the binary -> source mapping.
- temp_filename = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"],
- dotprefix=1, perms=0644)
- debinfo = utils.open_file(temp_filename, 'w')
+ (fd, temp_filename) = utils.temp_filename(Cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
+ debinfo = os.fdopen(fd, 'w')
for file_entry in file_keys:
f = files[file_entry]
if f["type"] == "deb":
filename = "%s/%s" % (Cnf["Dir::Queue::BTSVersionTrack"],
changes_file[:-8]+".debinfo")
os.rename(temp_filename, filename)
+ os.chmod(filename, 0644)
self.queue_build("accepted", Cnf["Dir::Queue::Accepted"])
morgue_file = os.path.join(Cnf["Dir::Morgue"],Cnf["Dir::MorgueReject"],file_entry)
try:
morgue_file = utils.find_next_free(morgue_file)
- except utils.tried_too_hard_exc:
+ except NoFreeFilenameError:
# Something's either gone badly Pete Tong, or
# someone is trying to exploit us.
utils.warn("**WARNING** failed to move %s from the reject directory to the morgue." % (file_entry))
# If we weren't given a manual rejection message, spawn an
# editor so the user can add one in...
if manual and not reject_message:
- temp_filename = utils.temp_filename()
+ (fd, temp_filename) = utils.temp_filename()
editor = os.environ.get("EDITOR","vi")
answer = 'E'
while answer == 'E':
################################################################################
- def cross_suite_version_check(self, query_result, file, new_version):
+ def cross_suite_version_check(self, query_result, file, new_version,
+ sourceful=False):
"""Ensure versions are newer than existing packages in target
suites and that cross-suite version checking rules as
set out in the conf file are satisfied."""
# Check versions for each target suite
for target_suite in self.pkg.changes["distribution"].keys():
- must_be_newer_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
- must_be_older_than = [ i.lower for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
+ must_be_newer_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)) ]
+ must_be_older_than = [ i.lower() for i in self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)) ]
# Enforce "must be newer than target suite" even if conffile omits it
if target_suite not in must_be_newer_than:
must_be_newer_than.append(target_suite)
for entry in query_result:
existent_version = entry[0]
suite = entry[1]
- if suite in must_be_newer_than and \
+ if suite in must_be_newer_than and sourceful and \
apt_pkg.VersionCompare(new_version, existent_version) < 1:
self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
if suite in must_be_older_than and \
AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
% (files[file]["package"],
files[file]["architecture"]))
- self.cross_suite_version_check(q.getresult(), file, files[file]["version"])
+ self.cross_suite_version_check(q.getresult(), file,
+ files[file]["version"], sourceful=False)
# Check for any existing copies of the file
q = self.projectB.query("""
q = self.projectB.query("""
SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")))
- self.cross_suite_version_check(q.getresult(), file, dsc.get("version"))
+ self.cross_suite_version_check(q.getresult(), file, dsc.get("version"),
+ sourceful=True)
return self.reject_message
# for example, the package was in potato but had an -sa
# upload in woody. So we need to choose the right one.
- x = ql[0]; # default to something sane in case we don't match any or have only one
+ # default to something sane in case we don't match any or have only one
+ x = ql[0]
if len(ql) > 1:
for i in ql:
actual_size = os.stat(old_file)[stat.ST_SIZE]
found = old_file
suite_type = x[2]
- dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
+ # need this for updating dsc_files in install()
+ dsc_files[dsc_file]["files id"] = x[3]
# See install() in process-accepted...
self.pkg.orig_tar_id = x[3]
self.pkg.orig_tar_gz = old_file