diff --git a/daklib/checks.py b/daklib/checks.py
index f874eec2..9916cc75 100644
--- a/daklib/checks.py
+++ b/daklib/checks.py
@@ -20,67 +20,106 @@
 """module providing pre-acceptance tests

-Please read the documentation for the `Check` class for the interface.
+Please read the documentation for the L{Check} class for the interface.
 """

 from daklib.config import Config
-from .dbconn import *
+import daklib.daksubprocess
+from daklib.dbconn import *
 import daklib.dbconn as dbconn
-from .regexes import *
-from .textutils import fix_maintainer, ParseMaintError
+from daklib.regexes import *
+from daklib.textutils import fix_maintainer, ParseMaintError
 import daklib.lintian as lintian
 import daklib.utils as utils
+import daklib.upload

+import apt_inst
 import apt_pkg
 from apt_pkg import version_compare
+import datetime
+import errno
 import os
+import subprocess
+import textwrap
+import time
 import yaml

-# TODO: replace by subprocess
-import commands
+
+def check_fields_for_valid_utf8(filename, control):
+    """Check all fields of a control file for valid UTF-8"""
+    for field in control.keys():
+        try:
+            field.decode('utf-8')
+            control[field].decode('utf-8')
+        except UnicodeDecodeError:
+            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))

 class Reject(Exception):
     """exception raised by failing checks"""
     pass

+class RejectExternalFilesMismatch(Reject):
+    """exception raised by failing the external hashes check"""
+
+    def __str__(self):
+        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]
+
+class RejectACL(Reject):
+    """exception raised by failing ACL checks"""
+    def __init__(self, acl, reason):
+        self.acl = acl
+        self.reason = reason
+
+    def __str__(self):
+        return "ACL {0}: {1}".format(self.acl.name, self.reason)
+
 class Check(object):
     """base class for checks

-    checks are called by daklib.archive.ArchiveUpload. Failing tests should
-    raise a `daklib.checks.Reject` exception including a human-readable
+    checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
+    raise a L{daklib.checks.Reject} exception including a human-readable
     description why the upload should be rejected.
""" def check(self, upload): """do checks - Args: - upload (daklib.archive.ArchiveUpload): upload to check + @type upload: L{daklib.archive.ArchiveUpload} + @param upload: upload to check - Raises: - daklib.checks.Reject + @raise daklib.checks.Reject: upload should be rejected """ raise NotImplemented def per_suite_check(self, upload, suite): """do per-suite checks - Args: - upload (daklib.archive.ArchiveUpload): upload to check - suite (daklib.dbconn.Suite): suite to check + @type upload: L{daklib.archive.ArchiveUpload} + @param upload: upload to check + + @type suite: L{daklib.dbconn.Suite} + @param suite: suite to check - Raises: - daklib.checks.Reject + @raise daklib.checks.Reject: upload should be rejected """ raise NotImplemented @property def forcable(self): """allow to force ignore failing test - True if it is acceptable to force ignoring a failing test, - False otherwise + C{True} if it is acceptable to force ignoring a failing test, + C{False} otherwise """ return False -class SignatureCheck(Check): +class SignatureAndHashesCheck(Check): + def check_replay(self, upload): + # Use private session as we want to remember having seen the .changes + # in all cases. + session = upload.session + history = SignatureHistory.from_signed_file(upload.changes) + r = history.query(session) + if r is not None: + raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen)) + return True + """Check signature of changes and dsc file (if included in upload) Make sure the signature is valid and done by a known user. @@ -89,14 +128,65 @@ class SignatureCheck(Check): changes = upload.changes if not changes.valid_signature: raise Reject("Signature for .changes not valid.") - if changes.source is not None: - if not changes.source.valid_signature: + self.check_replay(upload) + self._check_hashes(upload, changes.filename, changes.files.itervalues()) + + source = None + try: + source = changes.source + except Exception as e: + raise Reject("Invalid dsc file: {0}".format(e)) + if source is not None: + if not source.valid_signature: raise Reject("Signature for .dsc not valid.") - if changes.source.primary_fingerprint != changes.primary_fingerprint: + if source.primary_fingerprint != changes.primary_fingerprint: raise Reject(".changes and .dsc not signed by the same key.") + self._check_hashes(upload, source.filename, source.files.itervalues()) + if upload.fingerprint is None or upload.fingerprint.uid is None: raise Reject(".changes signed by unknown key.") + """Make sure hashes match existing files + + @type upload: L{daklib.archive.ArchiveUpload} + @param upload: upload we are processing + + @type filename: str + @param filename: name of the file the expected hash values are taken from + + @type files: sequence of L{daklib.upload.HashedFile} + @param files: files to check the hashes for + """ + def _check_hashes(self, upload, filename, files): + try: + for f in files: + f.check(upload.directory) + except daklib.upload.FileDoesNotExist as e: + raise Reject('{0}: {1}\n' + 'Perhaps you need to include the file in your upload?' 
+                         .format(filename, unicode(e)))
+        except daklib.upload.UploadException as e:
+            raise Reject('{0}: {1}'.format(filename, unicode(e)))
+
+class SignatureTimestampCheck(Check):
+    """Check timestamp of .changes signature"""
+    def check(self, upload):
+        changes = upload.changes
+
+        now = datetime.datetime.utcnow()
+        timestamp = changes.signature_timestamp
+        age = now - timestamp
+
+        age_max = datetime.timedelta(days=365)
+        age_min = datetime.timedelta(days=-7)
+
+        if age > age_max:
+            raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, age_max.days))
+        if age < age_min:
+            raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(age_min.days)))
+
+        return True
+
 class ChangesCheck(Check):
     """Check changes file for syntax errors."""
     def check(self, upload):
@@ -108,6 +198,8 @@ class ChangesCheck(Check):
             if field not in control:
                 raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

+        check_fields_for_valid_utf8(fn, control)
+
         source_match = re_field_source.match(control['Source'])
         if not source_match:
             raise Reject('{0}: Invalid Source field'.format(fn))
@@ -154,27 +246,81 @@ class ChangesCheck(Check):

         return True

-class HashesCheck(Check):
-    """Check hashes in .changes and .dsc are valid."""
+class ExternalHashesCheck(Check):
+    """Checks hashes in .changes and .dsc against an external database."""
+    def check_single(self, session, f):
+        q = session.execute("SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE '%%/%s'" % f.filename)
+        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)
+
+        if not ext_size:
+            return
+
+        if ext_size != f.size:
+            raise RejectExternalFilesMismatch(f.filename, 'size', f.size, ext_size)
+
+        if ext_md5sum != f.md5sum:
+            raise RejectExternalFilesMismatch(f.filename, 'md5sum', f.md5sum, ext_md5sum)
+
+        if ext_sha1sum != f.sha1sum:
+            raise RejectExternalFilesMismatch(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)
+
+        if ext_sha256sum != f.sha256sum:
+            raise RejectExternalFilesMismatch(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)
+
     def check(self, upload):
+        cnf = Config()
+
+        if not cnf.use_extfiles:
+            return
+
+        session = upload.session
         changes = upload.changes
+
         for f in changes.files.itervalues():
-            f.check(upload.directory)
-        source = changes.source
+            self.check_single(session, f)

+        source = changes.source
         if source is not None:
             for f in source.files.itervalues():
-                f.check(upload.directory)
+                self.check_single(session, f)

 class BinaryCheck(Check):
     """Check binary packages for syntax errors."""
     def check(self, upload):
+        debug_deb_name_postfix = "-dbgsym"
+        # XXX: Handle dynamic debug section name here
+
         for binary in upload.changes.binaries:
             self.check_binary(upload, binary)

-        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
-        for bn in binary_names:
-            if bn not in upload.changes.binary_names:
-                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))
+        binaries = {binary.control['Package']: binary
+                    for binary in upload.changes.binaries}
+
+        for name, binary in binaries.items():
+            if name in upload.changes.binary_names:
+                # Package is listed in Binary field. Everything is good.
+                pass
+            elif daklib.utils.is_in_debug_section(binary.control):
+                # If we have a binary package in the debug section, we
+                # can allow it to not be present in the Binary field
+                # in the .changes file, so long as its name (without
+                # -dbgsym) is present in the Binary list.
+                if not name.endswith(debug_deb_name_postfix):
+                    raise Reject('Package {0} is in the debug section, but '
+                                 'does not end in {1}.'.format(name, debug_deb_name_postfix))
+
+                # Right, so, it's named properly, let's check that
+                # the corresponding package is in the Binary list
+                origin_package_name = name[:-len(debug_deb_name_postfix)]
+                if origin_package_name not in upload.changes.binary_names:
+                    raise Reject(
+                        "Debug package {debug}'s corresponding binary package "
+                        "{origin} is not present in the Binary field.".format(
+                            debug=name, origin=origin_package_name))
+            else:
+                # Someone was a nasty little hacker and put a package
+                # into the .changes that isn't in debian/control. Bad,
+                # bad person.
+                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(name))

         return True

@@ -182,10 +328,12 @@ class BinaryCheck(Check):
         fn = binary.hashed_file.filename
         control = binary.control

-        for field in ('Package', 'Architecture', 'Version', 'Description'):
+        for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
             if field not in control:
                 raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

+        check_fields_for_valid_utf8(fn, control)
+
         # check fields

         package = control['Package']
@@ -241,6 +389,49 @@ class BinaryCheck(Check):
             except:
                 raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

+        # "Multi-Arch: no" breaks wanna-build, #768353
+        multi_arch = control.get("Multi-Arch")
+        if multi_arch == 'no':
+            raise Reject('{0}: Multi-Arch: no support in Debian is broken (#768353)'.format(fn))
+
+class BinaryTimestampCheck(Check):
+    """check timestamps of files in binary packages
+
+    Files with timestamps in the near future cause ugly warnings, and extreme
+    time travel can cause errors on extraction.
+    """
+    def check(self, upload):
+        cnf = Config()
+        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
+        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))
+
+        class TarTime(object):
+            def __init__(self):
+                self.future_files = dict()
+                self.past_files = dict()
+            def callback(self, member, data):
+                if member.mtime > future_cutoff:
+                    self.future_files[member.name] = member.mtime
+                elif member.mtime < past_cutoff:
+                    self.past_files[member.name] = member.mtime
+
+        def format_reason(filename, direction, files):
+            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
+            for fn, ts in files.iteritems():
+                reason += "  {0} ({1})".format(fn, time.ctime(ts))
+            return reason
+
+        for binary in upload.changes.binaries:
+            filename = binary.hashed_file.filename
+            path = os.path.join(upload.directory, filename)
+            deb = apt_inst.DebFile(path)
+            tar = TarTime()
+            deb.control.go(tar.callback)
+            if tar.future_files:
+                raise Reject(format_reason(filename, 'future', tar.future_files))
+            if tar.past_files:
+                raise Reject(format_reason(filename, 'past', tar.past_files))
+
 class SourceCheck(Check):
     """Check source package for syntax errors."""
     def check_filename(self, control, filename, regex):
@@ -259,7 +450,10 @@ class SourceCheck(Check):
         version = control['Version']
         if is_orig:
-            version = re_field_version_upstream.match(version).group('upstream')
+            upstream_match = re_field_version_upstream.match(version)
+            if not upstream_match:
+                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
+            version = upstream_match.group('upstream')
         version_match = re_field_version.match(version)
         version_without_epoch = version_match.group('without_epoch')
         if match.group('version') != version_without_epoch:
@@ -274,6 +468,8 @@ class SourceCheck(Check):
         control = source.dsc
         dsc_fn = source._dsc_file.filename

+        check_fields_for_valid_utf8(dsc_fn, control)
+
         # check fields
         if not re_field_package.match(control['Source']):
             raise Reject('{0}: Invalid Source field'.format(dsc_fn))
@@ -298,7 +494,11 @@ class SourceCheck(Check):
             except Exception as e:
                 raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

-        # TODO: check all expected files for given source format are included
+        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
+        if len(rejects) > 0:
+            raise Reject("\n".join(rejects))
+
+        return True

 class SingleDistributionCheck(Check):
     """Check that the .changes targets only a single distribution."""
@@ -308,108 +508,231 @@ class SingleDistributionCheck(Check):

 class ACLCheck(Check):
     """Check the uploader is allowed to upload the packages in .changes"""
-    def _check_dm(self, upload):
-        # This code is not very nice, but hopefully works until we can replace
-        # DM-Upload-Allowed, cf. https://lists.debian.org/debian-project/2012/06/msg00029.html
-        session = upload.session
-
-        if 'source' not in upload.changes.architectures:
-            raise Reject('DM uploads must include source')
-        for f in upload.changes.files.itervalues():
-            if f.section == 'byhand' or f.section[:4] == "raw-":
-                raise Reject("Uploading byhand packages is not allowed for DMs.")
-
-        # Reject NEW packages
-        distributions = upload.changes.distributions
-        assert len(distributions) == 1
-        suite = session.query(Suite).filter_by(suite_name=distributions[0]).one()
-        overridesuite = suite
-        if suite.overridesuite is not None:
-            overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
-        if upload._check_new(overridesuite):
-            raise Reject('Uploading NEW packages is not allowed for DMs.')
-
-        # Check DM-Upload-Allowed
-        last_suites = ['unstable', 'experimental']
-        if suite.suite_name.endswith('-backports'):
-            last_suites = [suite.suite_name]
-        last = session.query(DBSource).filter_by(source=upload.changes.changes['Source']) \
-            .join(DBSource.suites).filter(Suite.suite_name.in_(last_suites)) \
-            .order_by(DBSource.version.desc()).limit(1).first()
-        if last is None:
-            raise Reject('No existing source found in {0}'.format(' or '.join(last_suites)))
-        if not last.dm_upload_allowed:
-            raise Reject('DM-Upload-Allowed is not set in {0}={1}'.format(last.source, last.version))
-
-        # check current Changed-by is in last Maintainer or Uploaders
-        uploader_names = [ u.name for u in last.uploaders ]
-        changed_by_field = upload.changes.changes.get('Changed-By', upload.changes.changes['Maintainer'])
-        if changed_by_field not in uploader_names:
-            raise Reject('{0} is not an uploader for {1}={2}'.format(changed_by_field, last.source, last.version))
-
-        # check Changed-by is the DM
-        changed_by = fix_maintainer(changed_by_field)
-        uid = upload.fingerprint.uid
-        if uid is None:
-            raise Reject('Unknown uid for fingerprint {0}'.format(upload.fingerprint.fingerprint))
-        if uid.uid != changed_by[3] and uid.name != changed_by[2]:
-            raise Reject('DMs are not allowed to sponsor uploads (expected {0} <{1}> as maintainer, but got {2})'.format(uid.name, uid.uid, changed_by_field))
+    def _does_hijack(self, session, upload, suite):
         # Try to catch hijacks.
         # This doesn't work correctly. Uploads to experimental can still
         # "hijack" binaries from unstable. Also one can hijack packages
         # via buildds (but people who try this should not be DMs).
         for binary_name in upload.changes.binary_names:
             binaries = session.query(DBBinary).join(DBBinary.source) \
-                .join(DBBinary.suites).filter(Suite.suite_name.in_(upload.changes.distributions)) \
+                .filter(DBBinary.suites.contains(suite)) \
                 .filter(DBBinary.package == binary_name)
             for binary in binaries:
                 if binary.source.source != upload.changes.changes['Source']:
-                    raise Reject('DMs must not hijack binaries (binary={0}, other-source={1})'.format(binary_name, binary.source.source))
+                    return True, binary.package, binary.source.source
+        return False, None, None
+
+    def _check_acl(self, session, upload, acl):
+        source_name = upload.changes.source_name
+
+        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
+            return None, None
+        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
+            return None, None
+
+        if not acl.allow_new:
+            if upload.new:
+                return False, "NEW uploads are not allowed"
+            for f in upload.changes.files.itervalues():
+                if f.section == 'byhand' or f.section.startswith("raw-"):
+                    return False, "BYHAND uploads are not allowed"
+        if not acl.allow_source and upload.changes.source is not None:
+            return False, "sourceful uploads are not allowed"
+        binaries = upload.changes.binaries
+        if len(binaries) != 0:
+            if not acl.allow_binary:
+                return False, "binary uploads are not allowed"
+            if upload.changes.source is None and not acl.allow_binary_only:
+                return False, "binary-only uploads are not allowed"
+            if not acl.allow_binary_all:
+                uploaded_arches = set(upload.changes.architectures)
+                uploaded_arches.discard('source')
+                allowed_arches = set(a.arch_string for a in acl.architectures)
+                forbidden_arches = uploaded_arches - allowed_arches
+                if len(forbidden_arches) != 0:
+                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
+        if not acl.allow_hijack:
+            for suite in upload.final_suites:
+                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
+                if does_hijack:
+                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)
+
+        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
+        if acl.allow_per_source:
+            if acl_per_source is None:
+                return False, "not allowed to upload source package '{0}'".format(source_name)
+        if acl.deny_per_source and acl_per_source is not None:
+            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)
+
+        return True, None
+
+    def check(self, upload):
+        session = upload.session
+        fingerprint = upload.fingerprint
+        keyring = fingerprint.keyring
+
+        if keyring is None:
+            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
+        if not keyring.active:
+            raise Reject('Keyring {0} is not active'.format(keyring.name))
+
+        acl = fingerprint.acl or keyring.acl
+        if acl is None:
+            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
+        result, reason = self._check_acl(session, upload, acl)
+        if not result:
+            raise RejectACL(acl, reason)
+
+        for acl in session.query(ACL).filter_by(is_global=True):
+            result, reason = self._check_acl(session, upload, acl)
+            if result == False:
+                raise RejectACL(acl, reason)

         return True

+    def per_suite_check(self, upload, suite):
+        acls = suite.acls
+        if len(acls) != 0:
+            accept = False
+            for acl in acls:
+                result, reason = self._check_acl(upload.session, upload, acl)
+                if result == False:
+                    raise Reject(reason)
+                accept = accept or result
+            if not accept:
+                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
+        return True
+
+class TransitionCheck(Check):
+    """check for a transition"""
     def check(self, upload):
-        fingerprint = upload.fingerprint
-        source_acl = fingerprint.source_acl
-        if source_acl is None:
-            if 'source' in upload.changes.architectures:
-                raise Reject('Fingerprint {0} must not upload source'.format(fingerprint.fingerprint))
-        elif source_acl.access_level == 'dm':
-            self._check_dm(upload)
-        elif source_acl.access_level != 'full':
-            raise Reject('Unknown source_acl access level {0} for fingerprint {1}'.format(source_acl.access_level, fingerprint.fingerprint))
-
-        bin_architectures = set(upload.changes.architectures)
-        bin_architectures.discard('source')
-        binary_acl = fingerprint.binary_acl
-        if binary_acl is None:
-            if len(bin_architectures) > 0:
-                raise Reject('Fingerprint {0} must not upload binary packages'.format(fingerprint.fingerprint))
-        elif binary_acl.access_level == 'map':
-            query = upload.session.query(BinaryACLMap).filter_by(fingerprint=fingerprint)
-            allowed_architectures = [ m.architecture.arch_string for m in query ]
-
-            for arch in upload.changes.architectures:
-                if arch not in allowed_architectures:
-                    raise Reject('Fingerprint {0} must not upload binaries for architecture {1}'.format(fingerprint.fingerprint, arch))
-        elif binary_acl.access_level != 'full':
-            raise Reject('Unknown binary_acl access level {0} for fingerprint {1}'.format(binary_acl.access_level, fingerprint.fingerprint))
+        if 'source' not in upload.changes.architectures:
+            return True
+
+        transitions = self.get_transitions()
+        if transitions is None:
+            return True
+
+        session = upload.session
+
+        control = upload.changes.changes
+        source = re_field_source.match(control['Source']).group('package')
+
+        for trans in transitions:
+            t = transitions[trans]
+            transition_source = t["source"]
+            expected = t["new"]
+
+            # Will be None if nothing is in testing.
+            current = get_source_in_suite(transition_source, "testing", session)
+            if current is not None:
+                compare = apt_pkg.version_compare(current.version, expected)
+
+            if current is None or compare < 0:
+                # This is still valid, the current version in testing is older than
+                # the new version we wait for, or there is none in testing yet
+
+                # Check if the source we look at is affected by this.
+                if source in t['packages']:
+                    # The source is affected, let's reject it.
+
+                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)
+
+                    if current is not None:
+                        currentlymsg = "at version {0}".format(current.version)
+                    else:
+                        currentlymsg = "not present in testing"
+
+                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])
+
+                    rejectmsg += "\n".join(textwrap.wrap("""Your package
+is part of a testing transition designed to get {0} migrated (it is
+currently {1}, we need version {2}). This transition is managed by the
+Release Team, and {3} is the Release-Team member responsible for it.
+Please mail debian-release@lists.debian.org or contact {3} directly if you
+need further assistance. You might want to upload to experimental until this
+transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))
+
+                    raise Reject(rejectmsg)

         return True

+    def get_transitions(self):
+        cnf = Config()
+        path = cnf.get('Dinstall::ReleaseTransitions', '')
+        if path == '' or not os.path.exists(path):
+            return None
+
+        contents = file(path, 'r').read()
+        try:
+            transitions = yaml.safe_load(contents)
+            return transitions
+        except yaml.YAMLError as msg:
+            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))
+
+        return None
+
 class NoSourceOnlyCheck(Check):
+    def is_source_only_upload(self, upload):
+        changes = upload.changes
+        if changes.source is not None and len(changes.binaries) == 0:
+            return True
+        return False
+
     """Check for source-only upload

     Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
     set. Otherwise they are rejected.
+
+    Source-only uploads are only accepted for source packages having a
+    Package-List field that also lists architectures per package. This
+    check can be disabled via
+    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.
+
+    Source-only uploads to NEW are only allowed if
+    Dinstall::AllowSourceOnlyNew is set.
+
+    Uploads not including architecture-independent packages are only
+    allowed if Dinstall::AllowNoArchIndepUploads is set.
+    """
     def check(self, upload):
-        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
+        if not self.is_source_only_upload(upload):
             return True
+
+        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
+        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
+        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
+        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads')
         changes = upload.changes
-        if changes.source is not None and len(changes.binaries) == 0:
+
+        if not allow_source_only_uploads:
             raise Reject('Source-only uploads are not allowed.')
+        if not allow_source_only_uploads_without_package_list \
+                and changes.source.package_list.fallback:
+            raise Reject('Source-only uploads are only allowed if a Package-List field that also lists architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
+        if not allow_source_only_new and upload.new:
+            raise Reject('Source-only uploads to NEW are not allowed.')
+
+        if not allow_no_arch_indep_uploads \
+                and 'all' not in changes.architectures \
+                and 'experimental' not in changes.distributions \
+                and 'unstable' not in changes.distributions \
+                and 'sid' not in changes.distributions \
+                and changes.source.package_list.has_arch_indep_packages():
+            raise Reject('Uploads not including architecture-independent packages are not allowed.')
+
+        return True
+
+class ArchAllBinNMUCheck(Check):
+    """Check for arch:all binNMUs"""
+    def check(self, upload):
+        changes = upload.changes
+
+        if 'all' in changes.architectures and changes.changes.get('Binary-Only') == 'yes':
+            raise Reject('arch:all binNMUs are not allowed.')
+
+        return True
+
 class LintianCheck(Check):
@@ -432,11 +755,11 @@ class LintianCheck(Check):
         with open(tagfile, 'r') as sourcefile:
             sourcecontent = sourcefile.read()
         try:
-            lintiantags = yaml.load(sourcecontent)['lintian']
+            lintiantags = yaml.safe_load(sourcecontent)['lintian']
         except yaml.YAMLError as msg:
             raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

-        fd, temp_filename = utils.temp_filename()
+        fd, temp_filename = utils.temp_filename(mode=0o644)
         temptagfile = os.fdopen(fd, 'w')
         for tags in lintiantags.itervalues():
             for tag in tags:
@@ -445,9 +768,18 @@ class LintianCheck(Check):
         changespath = os.path.join(upload.directory, changes.filename)

         try:
-            # FIXME: no shell
-            cmd = "lintian --show-overrides --tags-from-file {0} {1}".format(temp_filename, changespath)
-            result, output = commands.getstatusoutput(cmd)
+            cmd = []
+            result = 0
+
+            user = cnf.get('Dinstall::UnprivUser') or None
+            if user is not None:
+                cmd.extend(['sudo', '-H', '-u', user])
+
+            cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
+            output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
+        except subprocess.CalledProcessError as e:
+            result = e.returncode
+            output = e.output
         finally:
             os.unlink(temp_filename)

@@ -477,13 +809,21 @@ class SourceFormatCheck(Check):
         if query.first() is None:
             raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))

+class SuiteCheck(Check):
+    def per_suite_check(self, upload, suite):
+        if not suite.accept_source_uploads and upload.changes.source is not None:
+            raise Reject('The suite "{0}" does not accept source uploads.'.format(suite.suite_name))
+        if not suite.accept_binary_uploads and len(upload.changes.binaries) != 0:
+            raise Reject('The suite "{0}" does not accept binary uploads.'.format(suite.suite_name))
+        return True
+
 class SuiteArchitectureCheck(Check):
     def per_suite_check(self, upload, suite):
         session = upload.session
         for arch in upload.changes.architectures:
             query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
             if query.first() is None:
-                raise Reject('Architecture {0} is not allowed in suite {2}'.format(arch, suite.suite_name))
+                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

         return True

@@ -508,23 +848,31 @@ class VersionCheck(Check):
         else:
             return db_binary.version

-    def _version_checks(self, upload, suite, op):
+    def _version_checks(self, upload, suite, other_suite, op, op_name):
         session = upload.session

         if upload.changes.source is not None:
             source_name = upload.changes.source.dsc['Source']
             source_version = upload.changes.source.dsc['Version']
-            v = self._highest_source_version(session, source_name, suite)
+            v = self._highest_source_version(session, source_name, other_suite)
             if v is not None and not op(version_compare(source_version, v)):
-                raise Reject('Version check failed (source={0}, version={1}, other-version={2}, suite={3})'.format(source_name, source_version, v, suite.suite_name))
+                raise Reject("Version check failed:\n"
+                             "Your upload included the source package {0}, version {1},\n"
+                             "however {3} already has version {2}.\n"
+                             "Uploads to {5} must have a {4} version than the one present in {3}."
+                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))

         for binary in upload.changes.binaries:
             binary_name = binary.control['Package']
             binary_version = binary.control['Version']
             architecture = binary.control['Architecture']
-            v = self._highest_binary_version(session, binary_name, suite, architecture)
+            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
             if v is not None and not op(version_compare(binary_version, v)):
-                raise Reject('Version check failed (binary={0}, version={1}, other-version={2}, suite={3})'.format(binary_name, binary_version, v, suite.suite_name))
+                raise Reject("Version check failed:\n"
+                             "Your upload included the binary package {0}, version {1}, for {2},\n"
+                             "however {4} already has version {3}.\n"
+                             "Uploads to {6} must have a {5} version than the one present in {4}."
+                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))

     def per_suite_check(self, upload, suite):
         session = upload.session
@@ -536,13 +884,13 @@ class VersionCheck(Check):
             must_be_newer_than.append(suite)

         for s in must_be_newer_than:
-            self._version_checks(upload, s, lambda result: result > 0)
+            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')

         vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
         must_be_older_than = [ vc.reference for vc in vc_older ]

         for s in must_be_older_than:
-            self._version_checks(upload, s, lambda result: result < 0)
+            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')

         return True
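
For illustration only (not part of the patch above): the L{Check} docstring defines the whole contract — implement check() and/or per_suite_check(), raise Reject (or a subclass such as RejectACL) to refuse an upload, and override forcable to allow a failure to be overridden. A minimal sketch of a custom check against that interface follows. The class name and the Urgency field it inspects are hypothetical; only names defined in this file (Check, Reject, upload.changes) are assumed.

    # Illustrative sketch only; "ExampleUrgencyCheck" is hypothetical.
    class ExampleUrgencyCheck(Check):
        """reject uploads whose .changes file lacks an Urgency field"""
        def check(self, upload):
            # upload.changes.changes is the parsed .changes control mapping,
            # the same structure ChangesCheck and TransitionCheck read above.
            control = upload.changes.changes
            if 'Urgency' not in control:
                raise Reject('{0}: missing Urgency field'.format(upload.changes.filename))
            return True

        def per_suite_check(self, upload, suite):
            # Nothing suite-specific to verify in this sketch.
            return True

        @property
        def forcable(self):
            # A rejection from this check may be ignored by force.
            return True

Whether daklib.archive.ArchiveUpload would actually run such a check depends on the check list maintained there, which is outside this diff.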