# Utility functions for katie
# Copyright (C) 2001, 2002 James Troup <james@nocrew.org>
-# $Id: katie.py,v 1.18 2002-05-08 11:52:31 troup Exp $
+# $Id: katie.py,v 1.24 2002-06-08 00:19:55 troup Exp $
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
exec "%s = self.pkg.%s;" % (i,i);
dump_filename = os.path.join(dest_dir,self.pkg.changes_file[:-8] + ".katie");
dump_file = utils.open_file(dump_filename, 'w');
+ try:
+ os.chmod(dump_filename, 0660);
+ except OSError, e:
+ if errno.errorcode[e.errno] == 'EPERM':
+ perms = stat.S_IMODE(os.stat(dump_filename)[stat.ST_MODE]);
+ if perms & stat.S_IROTH:
+ utils.fubar("%s is world readable and chmod failed." % (dump_filename));
+ else:
+ raise;
+
p = cPickle.Pickler(dump_file, 1);
for i in [ "d_changes", "d_dsc", "d_files", "d_dsc_files" ]:
exec "%s = {}" % i;
for i in [ "package", "version", "architecture", "type", "size",
"md5sum", "component", "location id", "source package",
"source version", "maintainer", "dbtype", "files id",
- "new", "section", "priority", "oldfiles", "othercomponents",
- "pool name" ]:
+ "new", "section", "priority", "othercomponents",
+ "pool name", "original component" ]:
if files[file].has_key(i):
d_files[file][i] = files[file][i];
## changes
d_changes[i] = changes[i];
# Optional changes fields
# FIXME: changes should be mandatory
- for i in [ "changed-by", "maintainer822", "filecontents", "format", "changes" ]:
+ for i in [ "changed-by", "maintainer822", "filecontents", "format",
+ "changes", "lisa note" ]:
if changes.has_key(i):
d_changes[i] = changes[i];
## dsc
Subst = self.Subst;
Cnf = self.Cnf;
changes = self.pkg.changes;
- dsc = self.pkg.dsc;
# Only do announcements for source uploads with a recent dpkg-dev installed
if float(changes.get("format", 0)) < 1.6 or not changes["architecture"].has_key("source"):
self.announce(short_summary, 1)
# Special support to enable clean auto-building of accepted packages
- if Cnf.FindB("Dinstall::SpecialAcceptedAutoBuild") and \
- self.pkg.changes["distribution"].has_key("unstable"):
- self.projectB.query("BEGIN WORK");
+ self.projectB.query("BEGIN WORK");
+ for suite in self.pkg.changes["distribution"].keys():
+ if suite not in Cnf.ValueList("Dinstall::AcceptedAutoBuildSuites"):
+ continue;
+ suite_id = db_access.get_suite_id(suite);
+ dest_dir = Cnf["Dir::AcceptedAutoBuild"];
+ if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
+ dest_dir = os.path.join(dest_dir, suite);
for file in file_keys:
src = os.path.join(Cnf["Dir::Queue::Accepted"], file);
- dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"], file);
- # Create a symlink to it
- os.symlink(src, dest);
+ dest = os.path.join(dest_dir, file);
+ if Cnf.FindB("Dinstall::SecurityAcceptedAutoBuild"):
+ # Copy it since the original won't be readable by www-data
+ utils.copy(src, dest);
+ else:
+ # Create a symlink to it
+ os.symlink(src, dest);
# Add it to the list of packages for later processing by apt-ftparchive
- self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));
+ self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
# If the .orig.tar.gz is in the pool, create a symlink to
# it (if one doesn't already exist)
if self.pkg.orig_tar_id:
for dsc_file in self.pkg.dsc_files.keys():
if dsc_file[-12:] == ".orig.tar.gz":
filename = dsc_file;
- dest = os.path.join(Cnf["Dir::AcceptedAutoBuild"],filename);
+ dest = os.path.join(dest_dir, filename);
# If it doesn't exist, create a symlink
if not os.path.exists(dest):
# Find the .orig.tar.gz in the pool
src = os.path.join(ql[0][0], ql[0][1]);
os.symlink(src, dest);
# Add it to the list of packages for later processing by apt-ftparchive
- self.projectB.query("INSERT INTO unstable_accepted (filename, in_accepted) VALUES ('%s', 't')" % (dest));
+ self.projectB.query("INSERT INTO accepted_autobuild (suite, filename, in_accepted) VALUES (%s, '%s', 't')" % (suite_id, dest));
+ # if it does, update things to ensure it's not removed prematurely
+ else:
+ self.projectB.query("UPDATE accepted_autobuild SET in_accepted = 't', last_used = NULL WHERE filename = '%s' AND suite = %s" % (dest, suite_id));
- self.projectB.query("COMMIT WORK");
+ self.projectB.query("COMMIT WORK");
###########################################################################
result = q.getresult();
# Remember the section and priority so we can check them later if appropriate
- if result != []:
+ if result:
files[file]["override section"] = result[0][0];
files[file]["override priority"] = result[0][1];
self.reject_message = self.reject_message + "\n";
self.reject_message = self.reject_message + prefix + str;
- def check_binaries_against_db(self, file, suite):
+ ################################################################################
+
+ def cross_suite_version_check(self, query_result, file, new_version):
+ """Ensure versions are newer than existing packages in target
+ suites and that cross-suite version checking rules as
+ set out in the conf file are satisfied."""
+
+ # Check versions for each target suite
+ for target_suite in self.pkg.changes["distribution"].keys():
+ must_be_newer_than = map(lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeNewerThan" % (target_suite)));
+ must_be_older_than = map(lower, self.Cnf.ValueList("Suite::%s::VersionChecks::MustBeOlderThan" % (target_suite)));
+ # Enforce "must be newer than target suite" even if conffile omits it
+ if target_suite not in must_be_newer_than:
+ must_be_newer_than.append(target_suite);
+ for entry in query_result:
+ existent_version = entry[0];
+ suite = entry[1];
+ if suite in must_be_newer_than and \
+ apt_pkg.VersionCompare(new_version, existent_version) != 1:
+ self.reject("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
+ if suite in must_be_older_than and \
+ apt_pkg.VersionCompare(new_version, existent_version) != -1:
+ self.reject("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite));
+
+ ################################################################################
+
+ def check_binary_against_db(self, file):
self.reject_message = "";
files = self.pkg.files;
- # Find any old binary packages
- q = self.projectB.query("SELECT b.id, b.version, f.filename, l.path, c.name FROM binaries b, bin_associations ba, suite s, location l, component c, architecture a, files f WHERE b.package = '%s' AND s.suite_name = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND ba.bin = b.id AND ba.suite = s.id AND b.architecture = a.id AND f.location = l.id AND l.component = c.id AND b.file = f.id"
- % (files[file]["package"], suite, files[file]["architecture"]))
- for oldfile in q.dictresult():
- files[file]["oldfiles"][suite] = oldfile;
- # Check versions [NB: per-suite only; no cross-suite checking done (yet)]
- if apt_pkg.VersionCompare(files[file]["version"], oldfile["version"]) != 1:
- self.reject("%s: old version (%s) >= new version (%s)." % (file, oldfile["version"], files[file]["version"]));
+ # Ensure version is sane
+ q = self.projectB.query("""
+SELECT b.version, su.suite_name FROM binaries b, bin_associations ba, suite su,
+ architecture a
+ WHERE b.package = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all')
+ AND ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id"""
+ % (files[file]["package"],
+ files[file]["architecture"]));
+ self.cross_suite_version_check(q.getresult(), file, files[file]["version"]);
+
# Check for any existing copies of the file
- q = self.projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s' AND a.id = b.architecture" % (files[file]["package"], files[file]["version"], files[file]["architecture"]))
- if q.getresult() != []:
+ q = self.projectB.query("""
+SELECT b.id FROM binaries b, architecture a
+ WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s'
+ AND a.id = b.architecture"""
+ % (files[file]["package"],
+ files[file]["version"],
+ files[file]["architecture"]))
+ if q.getresult():
self.reject("can not overwrite existing copy of '%s' already in the archive." % (file));
return self.reject_message;
################################################################################
def check_source_against_db(self, file):
- """Ensure source is newer than existing source in target suites."""
self.reject_message = "";
- changes = self.pkg.changes;
dsc = self.pkg.dsc;
- package = dsc.get("source");
- new_version = dsc.get("version");
- for suite in changes["distribution"].keys():
- q = self.projectB.query("SELECT s.version FROM source s, src_associations sa, suite su WHERE s.source = '%s' AND su.suite_name = '%s' AND sa.source = s.id AND sa.suite = su.id"
- % (package, suite));
- ql = map(lambda x: x[0], q.getresult());
- for old_version in ql:
- if apt_pkg.VersionCompare(new_version, old_version) != 1:
- self.reject("%s: Old version `%s' >= new version `%s'." % (file, old_version, new_version));
+ # Ensure version is sane
+ q = self.projectB.query("""
+SELECT s.version, su.suite_name FROM source s, src_associations sa, suite su
+ WHERE s.source = '%s' AND sa.source = s.id AND sa.suite = su.id""" % (dsc.get("source")));
+ self.cross_suite_version_check(q.getresult(), file, dsc.get("version"));
+
return self.reject_message;
################################################################################
ql = q.getresult();
if ql:
+ # Ignore exact matches for .orig.tar.gz
match = 0;
if dsc_file[-12:] == ".orig.tar.gz":
for i in ql:
- if int(files[dsc_file]["size"]) == int(i[0]) and \
+ if files.has_key(dsc_file) and \
+ int(files[dsc_file]["size"]) == int(i[0]) and \
files[dsc_file]["md5sum"] == i[1]:
self.reject("ignoring %s, since it's already in the archive." % (dsc_file), "Warning: ");
del files[dsc_file];
q = self.projectB.query("SELECT l.path, f.filename, l.type, f.id, l.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
ql = q.getresult();
- if ql != []:
+ if ql:
            # Unfortunately, we may get more than one
# match here if, for example, the package was
# in potato but had a -sa upload in woody. So