X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fchecks.py;h=2a7e3e10054b4ee946e6f1f24a8508e4199b677b;hb=321e814e04b7e01820120fd888e55975dafc240a;hp=019654caf278f7486c4504e63d68221d75283588;hpb=47f4f001c55284e717818f14b8f9eace3e682b56;p=dak.git

diff --git a/daklib/checks.py b/daklib/checks.py
index 019654ca..2a7e3e10 100644
--- a/daklib/checks.py
+++ b/daklib/checks.py
@@ -24,6 +24,7 @@ Please read the documentation for the L{Check} class for the interface.
 """
 
 from daklib.config import Config
+import daklib.daksubprocess
 from daklib.dbconn import *
 import daklib.dbconn as dbconn
 from daklib.regexes import *
@@ -37,11 +38,18 @@ import apt_pkg
 from apt_pkg import version_compare
 import errno
 import os
+import subprocess
 import time
 import yaml
 
-# TODO: replace by subprocess
-import commands
+def check_fields_for_valid_utf8(filename, control):
+    """Check all fields of a control file for valid UTF-8"""
+    for field in control.keys():
+        try:
+            field.decode('utf-8')
+            control[field].decode('utf-8')
+        except UnicodeDecodeError:
+            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))
 
 class Reject(Exception):
     """exception raised by failing checks"""
@@ -160,6 +168,8 @@ class ChangesCheck(Check):
             if field not in control:
                 raise Reject('{0}: misses mandatory field {1}'.format(fn, field))
 
+        check_fields_for_valid_utf8(fn, control)
+
         source_match = re_field_source.match(control['Source'])
         if not source_match:
             raise Reject('{0}: Invalid Source field'.format(fn))
@@ -264,6 +274,8 @@ class BinaryCheck(Check):
             if field not in control:
                 raise Reject('{0}: Missing mandatory field {0}.'.format(fn, field))
 
+        check_fields_for_valid_utf8(fn, control)
+
         # check fields
 
         package = control['Package']
@@ -328,7 +340,7 @@ class BinaryTimestampCheck(Check):
     def check(self, upload):
         cnf = Config()
         future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
-        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1984'), '%Y'))
+        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))
 
         class TarTime(object):
             def __init__(self):
@@ -336,9 +348,9 @@ class BinaryTimestampCheck(Check):
                 self.past_files = dict()
             def callback(self, member, data):
                 if member.mtime > future_cutoff:
-                    future_files[member.name] = member.mtime
+                    self.future_files[member.name] = member.mtime
                 elif member.mtime < past_cutoff:
-                    past_files[member.name] = member.mtime
+                    self.past_files[member.name] = member.mtime
 
         def format_reason(filename, direction, files):
             reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
@@ -375,7 +387,10 @@ class SourceCheck(Check):
 
         version = control['Version']
         if is_orig:
-            version = re_field_version_upstream.match(version).group('upstream')
+            upstream_match = re_field_version_upstream.match(version)
+            if not upstream_match:
+                raise Reject('{0}: Source package includes upstream tarball, but {0} has no Debian revision.'.format(filename, version))
+            version = upstream_match.group('upstream')
         version_match = re_field_version.match(version)
         version_without_epoch = version_match.group('without_epoch')
         if match.group('version') != version_without_epoch:
@@ -390,6 +405,8 @@ class SourceCheck(Check):
         control = source.dsc
         dsc_fn = source._dsc_file.filename
 
+        check_fields_for_valid_utf8(dsc_fn, control)
+
         # check fields
         if not re_field_package.match(control['Source']):
             raise Reject('{0}: Invalid Source field'.format(dsc_fn))
@@ -440,7 +457,7 @@ class ACLCheck(Check):
                 .filter(DBBinary.package == binary_name)
             for binary in binaries:
                 if binary.source.source != upload.changes.changes['Source']:
-                    return True, binary, binary.source.source
+                    return True, binary.package, binary.source.source
         return False, None, None
 
     def _check_acl(self, session, upload, acl):
@@ -534,6 +551,8 @@ class TransitionCheck(Check):
         if transitions is None:
             return True
 
+        session = upload.session
+
         control = upload.changes.changes
         source = re_field_source.match(control['Source']).group('package')
 
@@ -584,7 +603,7 @@ transition is done.""".format(source, currentlymsg, expected,t["rm"])))
 
         contents = file(path, 'r').read()
         try:
-            transitions = yaml.load(contents)
+            transitions = yaml.safe_load(contents)
             return transitions
         except yaml.YAMLError as msg:
             utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))
@@ -625,7 +644,7 @@ class LintianCheck(Check):
         with open(tagfile, 'r') as sourcefile:
             sourcecontent = sourcefile.read()
         try:
-            lintiantags = yaml.load(sourcecontent)['lintian']
+            lintiantags = yaml.safe_load(sourcecontent)['lintian']
         except yaml.YAMLError as msg:
             raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))
 
@@ -639,13 +658,17 @@ class LintianCheck(Check):
         changespath = os.path.join(upload.directory, changes.filename)
         try:
             cmd = []
+            result = 0
 
             user = cnf.get('Dinstall::UnprivUser') or None
             if user is not None:
                 cmd.extend(['sudo', '-H', '-u', user])
 
-            cmd.extend(['LINTIAN_COLL_UNPACKED_SKIP_SIG=1', '/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
-            result, output = commands.getstatusoutput(" ".join(cmd))
+            cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
+            output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError as e:
+            result = e.returncode
+            output = e.output
         finally:
             os.unlink(temp_filename)
 
@@ -706,23 +729,31 @@ class VersionCheck(Check):
         else:
             return db_binary.version
 
-    def _version_checks(self, upload, suite, op):
+    def _version_checks(self, upload, suite, other_suite, op, op_name):
         session = upload.session
 
         if upload.changes.source is not None:
             source_name = upload.changes.source.dsc['Source']
             source_version = upload.changes.source.dsc['Version']
-            v = self._highest_source_version(session, source_name, suite)
+            v = self._highest_source_version(session, source_name, other_suite)
             if v is not None and not op(version_compare(source_version, v)):
-                raise Reject('Version check failed (source={0}, version={1}, other-version={2}, suite={3})'.format(source_name, source_version, v, suite.suite_name))
+                raise Reject("Version check failed:\n"
+                             "Your upload included the source package {0}, version {1},\n"
+                             "however {3} already has version {2}.\n"
+                             "Uploads to {5} must have a {4} version than present in {3}."
+                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))
 
         for binary in upload.changes.binaries:
             binary_name = binary.control['Package']
             binary_version = binary.control['Version']
             architecture = binary.control['Architecture']
-            v = self._highest_binary_version(session, binary_name, suite, architecture)
+            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
             if v is not None and not op(version_compare(binary_version, v)):
-                raise Reject('Version check failed (binary={0}, version={1}, other-version={2}, suite={3})'.format(binary_name, binary_version, v, suite.suite_name))
+                raise Reject("Version check failed:\n"
+                             "Your upload included the binary package {0}, version {1}, for {2},\n"
+                             "however {4} already has version {3}.\n"
+                             "Uploads to {6} must have a {5} version than present in {4}."
+                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))
 
     def per_suite_check(self, upload, suite):
         session = upload.session
@@ -734,13 +765,13 @@ class VersionCheck(Check):
         must_be_newer_than.append(suite)
 
         for s in must_be_newer_than:
-            self._version_checks(upload, s, lambda result: result > 0)
+            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')
 
         vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
         must_be_older_than = [ vc.reference for vc in vc_older ]
 
         for s in must_be_older_than:
-            self._version_checks(upload, s, lambda result: result < 0)
+            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')
 
         return True
 