#!/usr/bin/env python
# Installs Debian packages
-# Copyright (C) 2000 James Troup <james@nocrew.org>
-# $Id: katie,v 1.8 2000-12-01 22:09:14 troup Exp $
+# Copyright (C) 2000, 2001 James Troup <james@nocrew.org>
+# $Id: katie,v 1.31 2001-03-14 05:12:53 troup Exp $
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
#########################################################################################
-import FCNTL, commands, fcntl, getopt, os, pg, pwd, re, shutil, stat, string, sys, tempfile, time
+import FCNTL, commands, fcntl, getopt, gzip, os, pg, pwd, re, shutil, stat, string, sys, tempfile, time
import apt_inst, apt_pkg
import utils, db_access
###############################################################################
re_isanum = re.compile (r'^\d+$');
-re_isadeb = re.compile (r'.*\.u?deb$');
-re_issource = re.compile (r'(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)');
-re_dpackage = re.compile (r'^package:\s*(.*)', re.IGNORECASE);
-re_darchitecture = re.compile (r'^architecture:\s*(.*)', re.IGNORECASE);
-re_dversion = re.compile (r'^version:\s*(.*)', re.IGNORECASE);
-re_dsection = re.compile (r'^section:\s*(.*)', re.IGNORECASE);
-re_dpriority = re.compile (r'^priority:\s*(.*)', re.IGNORECASE);
re_changes = re.compile (r'changes$');
-re_override_package = re.compile(r'(\S*)\s+.*');
re_default_answer = re.compile(r"\[(.*)\]");
re_fdnic = re.compile("\n\n");
+re_bad_diff = re.compile("^[\-\+][\-\+][\-\+] /dev/null");
###############################################################################
projectB = None;
new_ack_new = {};
new_ack_old = {};
-overrides = {};
install_count = 0;
install_bytes = 0.0;
reprocess = 0;
orig_tar_id = None;
+legacy_source_untouchable = {};
#########################################################################################
def usage (exit_code):
print """Usage: dinstall [OPTION]... [CHANGES]...
-a, --automatic automatic run
- -d, --debug=VALUE debug
- -k, --ack-new acknowledge new packages
+ -D, --debug=VALUE turn on debugging
+ -h, --help show this help and exit.
+ -k, --ack-new acknowledge new packages !! for cron.daily only !!
-m, --manual-reject=MSG manual reject with `msg'
- -n, --dry-run don't do anything
+ -n, --no-action don't do anything
-p, --no-lock don't check lockfile !! for cron.daily only !!
- -r, --no-version-check override version check
- -u, --distribution=DIST override distribution to `dist'"""
+ -u, --distribution=DIST override distribution to `dist'
+ -v, --version display the version number and exit"""
sys.exit(exit_code)
def check_signature (filename):
(result, output) = commands.getstatusoutput("gpg --emulate-md-encode-bug --batch --no-options --no-default-keyring --always-trust --keyring=%s --keyring=%s < %s >/dev/null" % (Cnf["Dinstall::PGPKeyring"], Cnf["Dinstall::GPGKeyring"], filename))
if (result != 0):
- reject_message = "Rejected: GPG signature check failed on `%s'.\n%s\n" % (filename, output)
+ reject_message = "Rejected: GPG signature check failed on `%s'.\n%s\n" % (os.path.basename(filename), output)
return 0
return 1
#####################################################################################################################
-def read_override_file (filename, suite, component):
- global overrides;
-
- file = utils.open_file(filename, 'r');
- for line in file.readlines():
- line = string.strip(utils.re_comments.sub('', line))
- override_package = re_override_package.sub(r'\1', line)
- if override_package != "":
- overrides[suite][component][override_package] = 1
- file.close()
-
+# See if a given package is in the override table
-# See if a given package is in the override file. Caches and only loads override files on demand.
+def in_override_p (package, component, suite, binary_type, file):
+ global files;
+
+ if binary_type == "": # must be source
+ type = "dsc";
+ else:
+ type = binary_type;
-def in_override_p (package, component, suite):
- global overrides;
+ # Override suite name; used for example with proposed-updates
+ if Cnf.Find("Suite::%s::OverrideSuite" % (suite)) != "":
+ suite = Cnf["Suite::%s::OverrideSuite" % (suite)];
# Avoid <undef> on unknown distributions
- if db_access.get_suite_id(suite) == -1:
+ suite_id = db_access.get_suite_id(suite);
+ if suite_id == -1:
return None;
+ component_id = db_access.get_component_id(component);
+ type_id = db_access.get_override_type_id(type);
# FIXME: nasty non-US specific hack
if string.lower(component[:7]) == "non-us/":
component = component[7:];
- if not overrides.has_key(suite) or not overrides[suite].has_key(component):
- if not overrides.has_key(suite):
- overrides[suite] = {}
- if not overrides[suite].has_key(component):
- overrides[suite][component] = {}
- if Cnf.has_key("Suite::%s::SingleOverrideFile" % (suite)): # legacy mixed suite (i.e. experimental)
- override_filename = Cnf["Dir::OverrideDir"] + 'override.' + Cnf["Suite::%s::OverrideCodeName" % (suite)];
- read_override_file (override_filename, suite, component);
- else: # all others.
- for src in ("", ".src"):
- override_filename = Cnf["Dir::OverrideDir"] + 'override.' + Cnf["Suite::%s::OverrideCodeName" % (suite)] + '.' + component + src;
- read_override_file (override_filename, suite, component);
-
- return overrides[suite][component].get(package, None);
+
+ q = projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
+ % (package, suite_id, component_id, type_id));
+ result = q.getresult();
+ # If checking for a source package fall back on the binary override type
+ if type == "dsc" and not result:
+ type_id = db_access.get_override_type_id("deb");
+ q = projectB.query("SELECT s.section, p.priority FROM override o, section s, priority p WHERE package = '%s' AND suite = %s AND component = %s AND type = %s AND o.section = s.id AND o.priority = p.id"
+ % (package, suite_id, component_id, type_id));
+ result = q.getresult();
+
+ # Remember the section and priority so we can check them later if appropriate
+ if result != []:
+ files[file]["override section"] = result[0][0];
+ files[file]["override priority"] = result[0][1];
+
+ return result;
#####################################################################################################################
def check_changes(filename):
global reject_message, changes, files
- # Parse the .changes field into a dictionary [FIXME - need to trap errors, pass on to reject_message etc.]
+ # Default in case we bail out
+ changes["maintainer822"] = Cnf["Dinstall::MyEmailAddress"];
+
+ # Parse the .changes field into a dictionary
try:
- changes = utils.parse_changes(filename)
+ changes = utils.parse_changes(filename, 0)
except utils.cant_open_exc:
reject_message = "Rejected: can't read changes file '%s'.\n" % (filename)
return 0;
except utils.changes_parse_error_exc, line:
reject_message = "Rejected: error parsing changes file '%s', can't grok: %s.\n" % (filename, line)
- changes["maintainer822"] = Cnf["Dinstall::MyEmailAddress"];
return 0;
- # Parse the Files field from the .changes into another dictionary [FIXME need to trap errors as above]
- files = utils.build_file_list(changes, "")
+ # Parse the Files field from the .changes into another dictionary
+ try:
+ files = utils.build_file_list(changes, "");
+ except utils.changes_parse_error_exc, line:
+ reject_message = "Rejected: error parsing changes file '%s', can't grok: %s.\n" % (filename, line);
# Check for mandatory fields
for i in ("source", "binary", "architecture", "version", "distribution","maintainer", "files"):
if not changes.has_key(i):
- reject_message = "Rejected: Missing field `%s' in changes file." % (i)
+ reject_message = "Rejected: Missing field `%s' in changes file.\n" % (i)
return 0 # Avoid <undef> errors during later tests
- # Fix the Maintainer: field to be RFC822 compatible
- (changes["maintainer822"], changes["maintainername"], changes["maintaineremail"]) = utils.fix_maintainer (changes["maintainer"])
-
# Override the Distribution: field if appropriate
if Cnf["Dinstall::Options::Override-Distribution"] != "":
reject_message = reject_message + "Warning: Distribution was overriden from %s to %s.\n" % (changes["distribution"], Cnf["Dinstall::Options::Override-Distribution"])
for j in string.split(o):
changes[i][j] = 1
+ # Fix the Maintainer: field to be RFC822 compatible
+ (changes["maintainer822"], changes["maintainername"], changes["maintaineremail"]) = utils.fix_maintainer (changes["maintainer"])
+
+ # Fix the Changed-By: field to be RFC822 compatible; if it exists.
+ (changes["changedby822"], changes["changedbyname"], changes["changedbyemail"]) = utils.fix_maintainer(changes.get("changed-by",""));
+
+ # For source uploads the Changed-By field wins; otherwise Maintainer wins.
+ if changes["architecture"].has_key("source"):
+ changes["uploader822"] = "To: %s\nCc: %s" % (changes["changedby822"], changes["maintainer822"]);
+ # changes["uploadername"], changes["uploaderemail"]) = (changes["changedby822"], changes["changedbyname"], changes["changedbyemail"]);
+
# Ensure all the values in Closes: are numbers
if changes.has_key("closes"):
for i in changes["closes"].keys():
# Map frozen to unstable if frozen doesn't exist
if changes["distribution"].has_key("frozen") and not Cnf.has_key("Suite::Frozen"):
del changes["distribution"]["frozen"]
+ changes["distribution"]["unstable"] = 1;
reject_message = reject_message + "Mapping frozen to unstable.\n"
+ # Map testing to unstable
+ if changes["distribution"].has_key("testing"):
+ del changes["distribution"]["testing"]
+ changes["distribution"]["unstable"] = 1;
+ reject_message = reject_message + "Mapping testing to unstable.\n"
+
# Ensure target distributions exist
for i in changes["distribution"].keys():
if not Cnf.has_key("Suite::%s" % (i)):
reject_message = reject_message + "Rejected: Unknown distribution `%s'.\n" % (i)
+ # Ensure there _is_ a target distribution
+ if changes["distribution"].keys() == []:
+ reject_message = reject_message + "Rejected: huh? Distribution field is empty in changes file.\n";
+
# Map unreleased arches from stable to unstable
if changes["distribution"].has_key("stable"):
for i in changes["architecture"].keys():
if not Cnf.has_key("Suite::Stable::Architectures::%s" % (i)):
reject_message = reject_message + "Mapping stable to unstable for unreleased arch `%s'.\n" % (i)
del changes["distribution"]["stable"]
+ changes["distribution"]["unstable"] = 1;
# Map arches not being released from frozen to unstable
if changes["distribution"].has_key("frozen"):
if not Cnf.has_key("Suite::Frozen::Architectures::%s" % (i)):
reject_message = reject_message + "Mapping frozen to unstable for non-releasing arch `%s'.\n" % (i)
del changes["distribution"]["frozen"]
+ changes["distribution"]["unstable"] = 1;
# Handle uploads to stable
if changes["distribution"].has_key("stable"):
- # If running from within proposed-updates kill non-stable distributions
+ # If running from within proposed-updates; assume an install to stable
if string.find(os.getcwd(), 'proposed-updates') != -1:
+ # FIXME: should probably remove anything that != stable
for i in ("frozen", "unstable"):
if changes["distribution"].has_key(i):
reject_message = reject_message + "Removing %s from distribution list.\n"
del changes["distribution"][i]
+ changes["stable upload"] = 1;
+ # If we can't find a file from the .changes; assume it's a package already in the pool and move into the pool
+ file = files.keys()[0];
+ if os.access(file, os.R_OK) == 0:
+ pool_dir = Cnf["Dir::PoolDir"] + '/' + utils.poolify(changes["source"], files[file]["component"]);
+ os.chdir(pool_dir);
# Otherwise (normal case) map stable to updates
else:
reject_message = reject_message + "Mapping stable to updates.\n";
files[file]["byhand"] = 1;
files[file]["type"] = "byhand";
# Checks for a binary package...
- elif re_isadeb.match(file) != None:
+ elif utils.re_isadeb.match(file) != None:
+ files[file]["type"] = "deb";
+
# Extract package information using dpkg-deb
- control = apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file,"r")))
+ try:
+ control = apt_pkg.ParseSection(apt_inst.debExtractControl(utils.open_file(file,"r")))
+ except:
+ reject_message = reject_message + "Rejected: %s: debExtractControl() raised %s.\n" % (file, sys.exc_type);
+ # Can't continue, none of the checks on control would work.
+ continue;
# Check for mandatory fields
if control.Find("Package") == None:
files[file]["dbtype"] = "deb";
else:
reject_message = reject_message + "Rejected: %s is neither a .deb or a .udeb.\n " % (file);
- files[file]["type"] = "deb";
files[file]["fullname"] = "%s_%s_%s.deb" % (control.Find("Package", ""), epochless_version, control.Find("Architecture", ""))
files[file]["source"] = control.Find("Source", "");
if files[file]["source"] == "":
files[file]["source"] = files[file]["package"];
# Checks for a source package...
else:
- m = re_issource.match(file)
+ m = utils.re_issource.match(file)
if m != None:
files[file]["package"] = m.group(1)
files[file]["version"] = m.group(2)
else:
files[file]["byhand"] = 1;
files[file]["type"] = "byhand";
-
+
files[file]["oldfiles"] = {}
for suite in changes["distribution"].keys():
# Skip byhand
continue
# See if the package is NEW
- if not in_override_p(files[file]["package"], files[file]["component"], suite):
+ if not in_override_p(files[file]["package"], files[file]["component"], suite, files[file].get("dbtype",""), file):
files[file]["new"] = 1
# Find any old binary packages
files[file]["oldfiles"][suite] = oldfile
# Check versions [NB: per-suite only; no cross-suite checking done (yet)]
if apt_pkg.VersionCompare(files[file]["version"], oldfile["version"]) != 1:
- if Cnf["Dinstall::Options::No-Version-Check"]:
- reject_message = reject_message + "Overriden rejection"
- else:
- reject_message = reject_message + "Rejected"
- reject_message = reject_message + ": %s Old version `%s' >= new version `%s'.\n" % (file, oldfile["version"], files[file]["version"])
+ reject_message = reject_message + "Rejected: %s Old version `%s' >= new version `%s'.\n" % (file, oldfile["version"], files[file]["version"])
# Check for existing copies of the file
- q = projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s' AND a.id = b.architecture" % (files[file]["package"], files[file]["version"], files[file]["architecture"]))
- if q.getresult() != []:
- reject_message = reject_message + "Rejected: can not overwrite existing copy of '%s' already in the archive.\n" % (file)
+ if not changes.has_key("stable upload"):
+ q = projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND a.arch_string = '%s' AND a.id = b.architecture" % (files[file]["package"], files[file]["version"], files[file]["architecture"]))
+ if q.getresult() != []:
+ reject_message = reject_message + "Rejected: can not overwrite existing copy of '%s' already in the archive.\n" % (file)
# Find any old .dsc files
elif files[file]["type"] == "dsc":
if component_id == -1:
reject_message = reject_message + "Rejected: file '%s' has unknown component '%s'.\n" % (file, component);
continue;
+
+ # Validate the priority
+ if string.find(files[file]["priority"],'/') != -1:
+ reject_message = reject_message + "Rejected: file '%s' has invalid priority '%s' [contains '/'].\n" % (file, files[file]["priority"]);
# Check the md5sum & size against existing files (if any)
location = Cnf["Dir::PoolDir"];
files[file]["location id"] = db_access.get_location_id (location, component, archive);
- files_id = db_access.get_files_id(component + '/' + file, files[file]["size"], files[file]["md5sum"], files[file]["location id"]);
+
+ files[file]["pool name"] = utils.poolify (changes["source"], files[file]["component"]);
+ files_id = db_access.get_files_id(files[file]["pool name"] + file, files[file]["size"], files[file]["md5sum"], files[file]["location id"]);
if files_id == -1:
reject_message = reject_message + "Rejected: INTERNAL ERROR, get_files_id() returned multiple matches for %s.\n" % (file)
elif files_id == -2:
###############################################################################
def check_dsc ():
- global dsc, dsc_files, reject_message, reprocess, orig_tar_id;
-
+ global dsc, dsc_files, reject_message, reprocess, orig_tar_id, legacy_source_untouchable;
+
for file in files.keys():
if files[file]["type"] == "dsc":
try:
- dsc = utils.parse_changes(file)
+ dsc = utils.parse_changes(file, 1)
except utils.cant_open_exc:
- reject_message = reject_message + "Rejected: can't read changes file '%s'.\n" % (filename)
+ reject_message = reject_message + "Rejected: can't read changes file '%s'.\n" % (file)
return 0;
except utils.changes_parse_error_exc, line:
- reject_message = reject_message + "Rejected: error parsing changes file '%s', can't grok: %s.\n" % (filename, line)
+ reject_message = reject_message + "Rejected: error parsing changes file '%s', can't grok: %s.\n" % (file, line)
+ return 0;
+ except utils.invalid_dsc_format_exc, line:
+ reject_message = reject_message + "Rejected: syntax error in .dsc file '%s', line %s.\n" % (file, line)
return 0;
try:
dsc_files = utils.build_file_list(dsc, 1)
except utils.no_files_exc:
reject_message = reject_message + "Rejected: no Files: field in .dsc file.\n";
continue;
+ except utils.changes_parse_error_exc, line:
+ reject_message = "Rejected: error parsing .dsc file '%s', can't grok: %s.\n" % (file, line);
+ continue;
# Try and find all files mentioned in the .dsc. This has
# to work harder to cope with the multiple possible
# locations of an .orig.tar.gz.
for dsc_file in dsc_files.keys():
if files.has_key(dsc_file):
- actual_md5 = files[dsc_file]["md5sum"]
+ actual_md5 = files[dsc_file]["md5sum"];
+ actual_size = int(files[dsc_file]["size"]);
found = "%s in incoming" % (dsc_file)
# Check the file does not already exist in the archive
- q = projectB.query("SELECT f.id FROM files f, location l WHERE f.filename ~ '/%s' AND l.id = f.location" % (utils.regex_safe(dsc_file)));
- if q.getresult() != []:
- reject_message = reject_message + "Rejected: can not overwrite existing copy of '%s' already in the archive.\n" % (dsc_file)
+ if not changes.has_key("stable upload"):
+ q = projectB.query("SELECT f.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
+
+ # "It has not broken them. It has fixed a
+ # brokenness. Your crappy hack exploited a
+ # bug in the old dinstall.
+ #
+ # "(Come on! I thought it was always obvious
+ # that one just doesn't release different
+ # files with the same name and version.)"
+ # -- ajk@ on d-devel@l.d.o
+
+ if q.getresult() != []:
+ reject_message = reject_message + "Rejected: can not overwrite existing copy of '%s' already in the archive.\n" % (dsc_file)
elif dsc_file[-12:] == ".orig.tar.gz":
- # Check in Incoming
- # See comment above process_it() for explanation...
- if os.access(dsc_file, os.R_OK) != 0:
- files[dsc_file] = {};
- files[dsc_file]["size"] = os.stat(dsc_file)[stat.ST_SIZE];
- files[dsc_file]["md5sum"] = dsc_files[dsc_file]["md5sum"];
- files[dsc_file]["section"] = files[file]["section"];
- files[dsc_file]["priority"] = files[file]["priority"];
- files[dsc_file]["component"] = files[file]["component"];
- reprocess = 1;
- return 1;
# Check in the pool
- q = projectB.query("SELECT l.path, f.filename, l.type, f.id FROM files f, location l WHERE f.filename ~ '/%s' AND l.id = f.location" % (utils.regex_safe(dsc_file)));
+ q = projectB.query("SELECT l.path, f.filename, l.type, f.id FROM files f, location l WHERE (f.filename ~ '/%s$' OR f.filename = '%s') AND l.id = f.location" % (utils.regex_safe(dsc_file), dsc_file));
ql = q.getresult();
- if len(ql) > 0:
- old_file = ql[0][0] + ql[0][1];
+
+ if ql != []:
+                        # Unfortunately, we may get more than one match
+ # here if, for example, the package was in potato
+ # but had a -sa upload in woody. So we need to a)
+ # choose the right one and b) mark all wrong ones
+ # as excluded from the source poolification (to
+ # avoid file overwrites).
+
+ x = ql[0]; # default to something sane in case we don't match any or have only one
+
+ if len(ql) > 1:
+ for i in ql:
+ old_file = i[0] + i[1];
+ actual_md5 = apt_pkg.md5sum(utils.open_file(old_file,"r"));
+ actual_size = os.stat(old_file)[stat.ST_SIZE];
+ if actual_md5 == dsc_files[dsc_file]["md5sum"] and actual_size == int(dsc_files[dsc_file]["size"]):
+ x = i;
+ else:
+ legacy_source_untouchable[i[3]] = "";
+
+ old_file = x[0] + x[1];
actual_md5 = apt_pkg.md5sum(utils.open_file(old_file,"r"));
+ actual_size = os.stat(old_file)[stat.ST_SIZE];
found = old_file;
- suite_type = ql[0][2];
+ suite_type = x[2];
+ dsc_files[dsc_file]["files id"] = x[3]; # need this for updating dsc_files in install()
# See install()...
if suite_type == "legacy" or suite_type == "legacy-mixed":
- orig_tar_id = ql[0][3];
+ orig_tar_id = x[3];
else:
- reject_message = reject_message + "Rejected: %s refers to %s, but I can't find it in Incoming or in the pool.\n" % (file, dsc_file);
- continue;
+ # Not there? Check in Incoming...
+ # [See comment above process_it() for explanation
+ # of why this is necessary...]
+ if os.access(dsc_file, os.R_OK) != 0:
+ files[dsc_file] = {};
+ files[dsc_file]["size"] = os.stat(dsc_file)[stat.ST_SIZE];
+ files[dsc_file]["md5sum"] = dsc_files[dsc_file]["md5sum"];
+ files[dsc_file]["section"] = files[file]["section"];
+ files[dsc_file]["priority"] = files[file]["priority"];
+ files[dsc_file]["component"] = files[file]["component"];
+ files[dsc_file]["type"] = "orig.tar.gz";
+ reprocess = 1;
+ return 1;
+ else:
+ reject_message = reject_message + "Rejected: %s refers to %s, but I can't find it in Incoming or in the pool.\n" % (file, dsc_file);
+ continue;
else:
- reject_message = reject_message + "Rejected: %s refers to %s, but I can't find it in Incoming." % (file, dsc_file);
+ reject_message = reject_message + "Rejected: %s refers to %s, but I can't find it in Incoming.\n" % (file, dsc_file);
continue;
if actual_md5 != dsc_files[dsc_file]["md5sum"]:
- reject_message = reject_message + "Rejected: md5sum for %s doesn't match %s.\n" % (found, file)
+ reject_message = reject_message + "Rejected: md5sum for %s doesn't match %s.\n" % (found, file);
+ if actual_size != int(dsc_files[dsc_file]["size"]):
+ reject_message = reject_message + "Rejected: size for %s doesn't match %s.\n" % (found, file);
+
+ if string.find(reject_message, "Rejected:") != -1:
+ return 0
+ else:
+ return 1
+
+###############################################################################
+
+# Some cunning stunt broke dpkg-source in dpkg 1.8{,.1}; detect the
+# resulting bad source packages and reject them.
+
+# Even more amusingly the fix in 1.8.1.1 didn't actually fix the
+# problem just changed the symptoms.
+
+def check_diff ():
+ global dsc, dsc_files, reject_message, reprocess, orig_tar_id;
+
+ for filename in files.keys():
+ if files[filename]["type"] == "diff.gz":
+ file = gzip.GzipFile(filename, 'r');
+ for line in file.readlines():
+ if re_bad_diff.search(line):
+ reject_message = reject_message + "Rejected: [dpkg-sucks] source package was produced by a broken version of dpkg-dev 1.8.x; please rebuild with >= 1.8.3 version installed.\n";
+ break;
if string.find(reject_message, "Rejected:") != -1:
return 0
if apt_pkg.md5sum(file_handle) != files[file]["md5sum"]:
reject_message = reject_message + "Rejected: md5sum check failed for %s.\n" % (file);
+def check_override ():
+ # Only check section & priority on sourceful uploads
+ if not changes["architecture"].has_key("source"):
+ return;
+
+ summary = ""
+ for file in files.keys():
+ if not files[file].has_key("new") and files[file]["type"] == "deb":
+ section = files[file]["section"];
+ override_section = files[file]["override section"];
+ if section != override_section and section != "-":
+ # Ignore this; it's a common mistake and not worth whining about
+ if string.lower(section) == "non-us/main" and string.lower(override_section) == "non-us":
+ continue;
+ summary = summary + "%s: section is overridden from %s to %s.\n" % (file, section, override_section);
+ priority = files[file]["priority"];
+ override_priority = files[file]["override priority"];
+ if priority != override_priority and priority != "-":
+ summary = summary + "%s: priority is overridden from %s to %s.\n" % (file, priority, override_priority);
+
+ if summary == "":
+ return;
+
+ mail_message = """Return-Path: %s
+From: %s
+To: %s
+Bcc: troup@auric.debian.org
+Subject: %s override disparity
+
+There are disparities between your recently installed upload and the
+override file for the following file(s):
+
+%s
+Either the package or the override file is incorrect. If you think
+the override is correct and the package wrong please fix the package
+so that this disparity is fixed in the next upload. If you feel the
+override is incorrect then please reply to this mail and explain why.
+
+--
+Debian distribution maintenance software
+
+(This message was generated automatically; if you believe that there
+is a problem with it please contact the archive administrators by
+mailing ftpmaster@debian.org)
+""" % (Cnf["Dinstall::MyEmailAddress"], Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], changes["source"], summary);
+ utils.send_mail (mail_message, "")
+
#####################################################################################################################
def action (changes_filename):
def install (changes_filename, summary, short_summary):
global install_count, install_bytes
+
+ # Stable uploads are a special case
+ if changes.has_key("stable upload"):
+ stable_install (changes_filename, summary, short_summary);
+ return;
print "Installing."
dsc_location_id = files[file]["location id"];
if not files[file]["files id"]:
files[file]["files id"] = db_access.set_files_id (filename, files[file]["size"], files[file]["md5sum"], dsc_location_id)
- dsc_file_id = files[file]["files id"]
projectB.query("INSERT INTO source (source, version, maintainer, file) VALUES ('%s', '%s', %d, %d)"
% (package, version, maintainer_id, files[file]["files id"]))
suite_id = db_access.get_suite_id(suite);
projectB.query("INSERT INTO src_associations (suite, source) VALUES (%d, currval('source_id_seq'))" % (suite_id))
-
- # Add the .diff.gz and {.orig,}.tar.gz files to the DB (files and dsc_files)
- for file in files.keys():
- if files[file]["type"] == "diff.gz" or files[file]["type"] == "orig.tar.gz" or files[file]["type"] == "tar.gz":
- if not files[file]["files id"]:
- filename = files[file]["pool name"] + file;
- files[file]["files id"] = db_access.set_files_id (filename, files[file]["size"], files[file]["md5sum"], files[file]["location id"])
- projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files[file]["files id"]));
-
+ # Add the source files to the DB (files and dsc_files)
+ projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files[file]["files id"]));
+ for dsc_file in dsc_files.keys():
+ filename = files[file]["pool name"] + dsc_file;
+        # If the .orig.tar.gz is already in the pool, its
+        # files id is stored in dsc_files by check_dsc().
+ files_id = dsc_files[dsc_file].get("files id", None);
+ if files_id == None:
+ files_id = db_access.get_files_id(filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id);
+ # FIXME: needs to check for -1/-2 and or handle exception
+ if files_id == None:
+ files_id = db_access.set_files_id (filename, dsc_files[dsc_file]["size"], dsc_files[dsc_file]["md5sum"], dsc_location_id);
+ projectB.query("INSERT INTO dsc_files (source, file) VALUES (currval('source_id_seq'), %d)" % (files_id));
+
# Add the .deb files to the DB
for file in files.keys():
if files[file]["type"] == "deb":
suite_id = db_access.get_suite_id(suite);
projectB.query("INSERT INTO bin_associations (suite, bin) VALUES (%d, currval('binaries_id_seq'))" % (suite_id));
+ # If the .orig.tar.gz is in a legacy directory we need to poolify
+ # it, so that apt-get source (and anything else that goes by the
+ # "Directory:" field in the Sources.gz file) works.
+ if orig_tar_id != None:
+ q = projectB.query("SELECT DISTINCT ON (f.id) l.path, f.filename, f.id as files_id, df.source, df.id as dsc_files_id, f.size, f.md5sum FROM files f, dsc_files df, location l WHERE df.source IN (SELECT source FROM dsc_files WHERE file = %s) AND f.id = df.file AND l.id = f.location AND (l.type = 'legacy' OR l.type = 'legacy-mixed')" % (orig_tar_id));
+ qd = q.dictresult();
+ for qid in qd:
+ # Is this an old upload superseded by a newer -sa upload? (See check_dsc() for details)
+ if legacy_source_untouchable.has_key(qid["files_id"]):
+ continue;
+ # First move the files to the new location
+ legacy_filename = qid["path"]+qid["filename"];
+ pool_location = utils.poolify (changes["source"], files[file]["component"]);
+ pool_filename = pool_location + os.path.basename(qid["filename"]);
+ destination = Cnf["Dir::PoolDir"] + pool_location
+ utils.move(legacy_filename, destination);
+ # Then Update the DB's files table
+ q = projectB.query("UPDATE files SET filename = '%s', location = '%s' WHERE id = '%s'" % (pool_filename, dsc_location_id, qid["files_id"]));
+
# Install the files into the pool
for file in files.keys():
if files[file].has_key("byhand"):
if Cnf.has_key("Suite::%s::CopyChanges" % (suite)):
utils.copy (changes_filename, Cnf["Dir::RootDir"] + Cnf["Suite::%s::CopyChanges" % (suite)]);
- # If the .orig.tar.gz is in a legacy directory we need to poolify
- # it, so that apt-get source (and anything else that goes by the
- # "Directory:" field in the Sources.gz file) works.
- if orig_tar_id != None:
- q = projectB.query("SELECT l.path, f.filename, f.id as files_id, df.source, df.id as dsc_files_id, f.size, f.md5sum FROM files f, dsc_files df, location l WHERE df.source IN (SELECT source FROM dsc_files WHERE file = %s) AND f.id = df.file AND l.id = f.location" % (orig_tar_id));
- qd = q.dictresult();
- for qid in qd:
- # First move the files to the new location
- legacy_filename = qid["path"]+qid["filename"];
- pool_location = utils.poolify (changes["source"], files[file]["component"]);
- pool_filename = pool_location + os.path.basename(qid["filename"]);
- destination = Cnf["Dir::PoolDir"] + pool_location
- utils.move(legacy_filename, destination);
- # Update the DB: files table
- new_files_id = db_access.set_files_id(pool_filename, qid["size"], qid["md5sum"], dsc_location_id);
- # Update the DB: dsc_files table
- projectB.query("INSERT INTO dsc_files (source, file) VALUES (%s, %s)" % (qid["source"], new_files_id));
- # Update the DB: source table
- if legacy_filename[-4:] == ".dsc":
- projectB.query("UPDATE source SET file = %s WHERE id = %d" % (new_files_id, qid["source"]));
+ projectB.query("COMMIT WORK");
- for qid in qd:
- # Remove old data from the DB: dsc_files table
- projectB.query("DELETE FROM dsc_files WHERE id = %s" % (qid["dsc_files_id"]));
- # Remove old data from the DB: files table
- projectB.query("DELETE FROM files WHERE id = %s" % (qid["files_id"]));
+ try:
+ utils.move (changes_filename, Cnf["Dir::IncomingDir"] + 'DONE/' + os.path.basename(changes_filename))
+ except:
+ sys.stderr.write("W: couldn't move changes file '%s' to DONE directory [Got %s].\n" % (os.path.basename(changes_filename), sys.exc_type));
+
+ install_count = install_count + 1;
+
+ if not Cnf["Dinstall::Options::No-Mail"]:
+ mail_message = """Return-Path: %s
+From: %s
+To: %s
+Bcc: troup@auric.debian.org
+Subject: %s INSTALLED
+
+%s
+Installing:
+%s
+
+%s""" % (Cnf["Dinstall::MyEmailAddress"], Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], os.path.basename(changes_filename), reject_message, summary, installed_footer)
+ utils.send_mail (mail_message, "")
+ announce (short_summary, 1)
+ check_override ();
+
+#####################################################################################################################
+
+def stable_install (changes_filename, summary, short_summary):
+ global install_count, install_bytes
+
+ print "Installing to stable."
+
+ archive = utils.where_am_i();
+
+ # Begin a transaction; if we bomb out anywhere between here and the COMMIT WORK below, the DB will not be changed.
+ projectB.query("BEGIN WORK");
+
+ # Add the .dsc file to the DB
+ for file in files.keys():
+ if files[file]["type"] == "dsc":
+ package = dsc["source"]
+ version = dsc["version"] # NB: not files[file]["version"], that has no epoch
+ q = projectB.query("SELECT id FROM source WHERE source = '%s' AND version = '%s'" % (package, version))
+ ql = q.getresult()
+ if ql == []:
+ sys.stderr.write("INTERNAL ERROR: couldn't find '%s' (%s) in source table.\n" % (package, version));
+ sys.exit(1);
+ source_id = ql[0][0];
+ suite_id = db_access.get_suite_id('proposed-updates');
+ projectB.query("DELETE FROM src_associations WHERE suite = '%s' AND source = '%s'" % (suite_id, source_id));
+ suite_id = db_access.get_suite_id('stable');
+ projectB.query("INSERT INTO src_associations (suite, source) VALUES ('%s', '%s')" % (suite_id, source_id));
+
+ # Add the .deb files to the DB
+ for file in files.keys():
+ if files[file]["type"] == "deb":
+ package = files[file]["package"]
+ version = files[file]["version"]
+ architecture = files[file]["architecture"]
+ q = projectB.query("SELECT b.id FROM binaries b, architecture a WHERE b.package = '%s' AND b.version = '%s' AND (a.arch_string = '%s' OR a.arch_string = 'all') AND b.architecture = a.id" % (package, version, architecture))
+ ql = q.getresult()
+ if ql == []:
+ sys.stderr.write("INTERNAL ERROR: couldn't find '%s' (%s for %s architecture) in binaries table.\n" % (package, version, architecture));
+ sys.exit(1);
+ binary_id = ql[0][0];
+ suite_id = db_access.get_suite_id('proposed-updates');
+ projectB.query("DELETE FROM bin_associations WHERE suite = '%s' AND bin = '%s'" % (suite_id, binary_id));
+ suite_id = db_access.get_suite_id('stable');
+ projectB.query("INSERT INTO bin_associations (suite, bin) VALUES ('%s', '%s')" % (suite_id, binary_id));
projectB.query("COMMIT WORK");
- utils.move (changes_filename, Cnf["Dir::IncomingDir"] + 'DONE/' + os.path.basename(changes_filename))
+ utils.move (changes_filename, Cnf["Rhona::Morgue"] + os.path.basename(changes_filename));
+
+ # Update the Stable ChangeLog file
+
+ new_changelog_filename = Cnf["Dir::RootDir"] + Cnf["Suite::Stable::ChangeLogBase"] + ".ChangeLog";
+ changelog_filename = Cnf["Dir::RootDir"] + Cnf["Suite::Stable::ChangeLogBase"] + "ChangeLog";
+ if os.path.exists(new_changelog_filename):
+ os.unlink (new_changelog_filename);
+
+ new_changelog = utils.open_file(new_changelog_filename, 'w');
+ for file in files.keys():
+ if files[file]["type"] == "deb":
+ new_changelog.write("stable/%s/binary-%s/%s\n" % (files[file]["component"], files[file]["architecture"], file));
+ elif utils.re_issource.match(file) != None:
+ new_changelog.write("stable/%s/source/%s\n" % (files[file]["component"], file));
+ else:
+ new_changelog.write("%s\n" % (file));
+ chop_changes = re_fdnic.sub("\n", changes["changes"]);
+ new_changelog.write(chop_changes + '\n\n');
+ if os.access(changelog_filename, os.R_OK) != 0:
+ changelog = utils.open_file(changelog_filename, 'r');
+ new_changelog.write(changelog.read());
+ new_changelog.close();
+ if os.access(changelog_filename, os.R_OK) != 0:
+ os.unlink(changelog_filename);
+ utils.move(new_changelog_filename, changelog_filename);
install_count = install_count + 1;
From: %s
To: %s
Bcc: troup@auric.debian.org
-Subject: %s INSTALLED
+Subject: %s INSTALLED into stable
%s
Installing:
%s
-%s""" % (Cnf["Dinstall::MyEmailAddress"], Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], changes_filename, reject_message, summary, installed_footer)
+%s""" % (Cnf["Dinstall::MyEmailAddress"], Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], os.path.basename(changes_filename), reject_message, summary, installed_footer)
utils.send_mail (mail_message, "")
announce (short_summary, 1)
reason_filename = re_changes.sub("reason", base_changes_filename);
reject_filename = "%s/REJECT/%s" % (Cnf["Dir::IncomingDir"], reason_filename);
- # Move the .changes files and it's contents into REJECT/
- utils.move (changes_filename, "%s/REJECT/%s" % (Cnf["Dir::IncomingDir"], base_changes_filename));
+    # Move the .changes file and its contents into REJECT/ (if we can; errors are ignored)
+ try:
+ utils.move (changes_filename, "%s/REJECT/%s" % (Cnf["Dir::IncomingDir"], base_changes_filename));
+ except:
+ sys.stderr.write("W: couldn't reject changes file '%s' [Got %s].\n" % (base_changes_filename, sys.exc_type));
+ pass;
for file in files.keys():
- if os.access(file,os.R_OK) == 0:
- utils.move (file, "%s/REJECT/%s" % (Cnf["Dir::IncomingDir"], file));
+ if os.path.exists(file):
+ try:
+ utils.move (file, "%s/REJECT/%s" % (Cnf["Dir::IncomingDir"], file));
+ except:
+ sys.stderr.write("W: couldn't reject file '%s' [Got %s].\n" % (file, sys.exc_type));
+ pass;
# If this is not a manual rejection generate the .reason file and rejection mail message
if manual_reject_mail_filename == "":
%s
===
-%s""" % (Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], changes_filename, reject_message, reject_footer);
+%s""" % (Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], os.path.basename(changes_filename), reject_message, reject_footer);
else: # Have a manual rejection file to use
reject_mail_message = ""; # avoid <undef>'s
%s
%s
===
-%s""" % (user_email_address, Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], changes_filename, manual_reject_message, reject_message, reject_footer)
+%s""" % (user_email_address, Cnf["Dinstall::MyEmailAddress"], changes["maintainer822"], os.path.basename(changes_filename), manual_reject_message, reject_message, reject_footer)
# Write the rejection email out as the <foo>.reason file
reason_filename = re_changes.sub("reason", os.path.basename(changes_filename));
if manual_reject_message == "":
result = os.system("vi +6 %s" % (reject_file))
if result != 0:
- sys.stderr.write ("vi invocation failed for `%s'!" % (reject_file))
+ sys.stderr.write ("vi invocation failed for `%s'!\n" % (reject_file))
sys.exit(result)
# Then process it as if it were an automatic rejection
def acknowledge_new (changes_filename, summary):
global new_ack_new;
+ changes_filename = os.path.basename(changes_filename);
+
new_ack_new[changes_filename] = 1;
if new_ack_old.has_key(changes_filename):
(dsc_rfc822, dsc_name, dsc_email) = utils.fix_maintainer (dsc.get("maintainer",Cnf["Dinstall::MyEmailAddress"]));
bugs = changes["closes"].keys()
bugs.sort()
- if dsc_name == changes["maintainername"]:
+ # changes["changedbyname"] == dsc_name is probably never true, but better
+ # safe than sorry
+ if dsc_name == changes["maintainername"] and (changes["changedbyname"] == "" or changes["changedbyname"] == dsc_name):
summary = summary + "Closing bugs: "
for bug in bugs:
summary = summary + "%s " % (bug)
control_message = ""
for bug in bugs:
summary = summary + "%s " % (bug)
- control_message = control_message + "severity %s fixed\n" % (bug)
+ control_message = control_message + "tag %s + fixed\n" % (bug)
if action and control_message != "":
mail_message = """Return-Path: %s
From: %s
# into the .changes structure and reprocess the .changes file.
def process_it (changes_file):
-    global reprocess, orig_tar_id;
+    # NOTE: legacy_source_untouchable must be declared global here too;
+    # without it the reset below only creates a function-local dict and
+    # install() keeps seeing stale entries from the previous .changes file.
+    global reprocess, orig_tar_id, legacy_source_untouchable, changes, dsc, dsc_files, files, reject_message;
+    # Reset some globals
reprocess = 1;
+    changes = {};
+    dsc = {};
+    dsc_files = {};
+    files = {};
orig_tar_id = None;
+    legacy_source_untouchable = {};
+    reject_message = "";
+
+    # Absolutize the filename to avoid the requirement of being in the
+    # same directory as the .changes file.
+    changes_file = os.path.abspath(changes_file);
+    # And since handling of installs to stable munges with the CWD;
+    # save and restore it.
+    cwd = os.getcwd();
+
check_signature (changes_file);
check_changes (changes_file);
while reprocess:
check_files ();
check_md5sums ();
check_dsc ();
+        check_diff ();
action(changes_file);
+    # Restore CWD
+    os.chdir(cwd);
+
+
###############################################################################
def main():
- global Cnf, projectB, reject_message, install_bytes, new_ack_old
+ global Cnf, projectB, install_bytes, new_ack_old
apt_pkg.init();
('m',"manual-reject","Dinstall::Options::Manual-Reject", "HasArg"),
('n',"no-action","Dinstall::Options::No-Action"),
('p',"no-lock", "Dinstall::Options::No-Lock"),
- ('r',"no-version-check", "Dinstall::Options::No-Version-Check"),
('s',"no-mail", "Dinstall::Options::No-Mail"),
('u',"override-distribution", "Dinstall::Options::Override-Distribution", "HasArg"),
('v',"version","Dinstall::Options::Version")];
# Process the changes files
for changes_file in changes_files:
- reject_message = ""
print "\n" + changes_file;
process_it (changes_file);
- install_mag = " b";
- if install_bytes > 10000:
- install_bytes = install_bytes / 1000;
- install_mag = " Kb";
- if install_bytes > 10000:
- install_bytes = install_bytes / 1000;
- install_mag = " Mb";
if install_count:
sets = "set"
if install_count > 1:
sets = "sets"
- sys.stderr.write("Installed %d package %s, %d%s.\n" % (install_count, sets, int(install_bytes), install_mag))
+ sys.stderr.write("Installed %d package %s, %s.\n" % (install_count, sets, utils.size_type(int(install_bytes))));
# Write out the list of already-acknowledged NEW packages
if Cnf["Dinstall::Options::Ack-New"]: