def __str__(self):
return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]
class RejectACL(Reject):
    """Exception raised by failing ACL checks.

    Carries the ACL that caused the rejection together with a
    human-readable reason, so callers can report which ACL denied
    the upload.
    """
    def __init__(self, acl, reason):
        # ACL object that denied the upload; __str__ below reads its
        # .name attribute
        self.acl = acl
        # human-readable explanation of the rejection
        self.reason = reason

    def __str__(self):
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
class Check(object):
"""base class for checks
"""
return False
-class SignatureCheck(Check):
+class SignatureAndHashesCheck(Check):
"""Check signature of changes and dsc file (if included in upload)
Make sure the signature is valid and done by a known user.
changes = upload.changes
if not changes.valid_signature:
raise Reject("Signature for .changes not valid.")
- if changes.source is not None:
- if not changes.source.valid_signature:
+ self._check_hashes(upload, changes.filename, changes.files.itervalues())
+
+ source = None
+ try:
+ source = changes.source
+ except Exception as e:
+ raise Reject("Invalid dsc file: {0}".format(e))
+ if source is not None:
+ if not source.valid_signature:
raise Reject("Signature for .dsc not valid.")
- if changes.source.primary_fingerprint != changes.primary_fingerprint:
+ if source.primary_fingerprint != changes.primary_fingerprint:
raise Reject(".changes and .dsc not signed by the same key.")
+ self._check_hashes(upload, source.filename, source.files.itervalues())
+
if upload.fingerprint is None or upload.fingerprint.uid is None:
raise Reject(".changes signed by unknown key.")
+ """Make sure hashes match existing files
+
+ @type upload: L{daklib.archive.ArchiveUpload}
+ @param upload: upload we are processing
+
+ @type filename: str
+ @param filename: name of the file the expected hash values are taken from
+
+ @type files: sequence of L{daklib.upload.HashedFile}
+ @param files: files to check the hashes for
+ """
+ def _check_hashes(self, upload, filename, files):
+ try:
+ for f in files:
+ f.check(upload.directory)
+ except IOError as e:
+ if e.errno == errno.ENOENT:
+ raise Reject('{0} refers to non-existing file: {1}\n'
+ 'Perhaps you need to include it in your upload?'
+ .format(filename, os.path.basename(e.filename)))
+ raise
+ except InvalidHashException as e:
+ raise Reject('{0}: {1}'.format(filename, unicode(e)))
+
class ChangesCheck(Check):
"""Check changes file for syntax errors."""
def check(self, upload):
return True
-class HashesCheck(Check):
- """Check hashes in .changes and .dsc are valid."""
- def check(self, upload):
- what = None
- try:
- changes = upload.changes
- what = changes.filename
- for f in changes.files.itervalues():
- f.check(upload.directory)
- source = changes.source
- if source is not None:
- what = source.filename
- for f in source.files.itervalues():
- f.check(upload.directory)
- except IOError as e:
- if e.errno == errno.ENOENT:
- raise Reject('{0} refers to non-existing file: {1}\n'
- 'Perhaps you need to include it in your upload?'
- .format(what, os.path.basename(e.filename)))
- raise
- except InvalidHashException as e:
- raise Reject('{0}: {1}'.format(what, unicode(e)))
-
class ExternalHashesCheck(Check):
"""Checks hashes in .changes and .dsc against an external database."""
def check_single(self, session, f):
def check(self, upload):
cnf = Config()
future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
- past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1984'), '%Y'))
+ past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))
class TarTime(object):
def __init__(self):
self.past_files = dict()
def callback(self, member, data):
if member.mtime > future_cutoff:
- future_files[member.name] = member.mtime
+ self.future_files[member.name] = member.mtime
elif member.mtime < past_cutoff:
- past_files[member.name] = member.mtime
+ self.past_files[member.name] = member.mtime
def format_reason(filename, direction, files):
reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
version = control['Version']
if is_orig:
- version = re_field_version_upstream.match(version).group('upstream')
+ upstream_match = re_field_version_upstream.match(version)
+ if not upstream_match:
+ raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
+ version = upstream_match.group('upstream')
version_match = re_field_version.match(version)
version_without_epoch = version_match.group('without_epoch')
if match.group('version') != version_without_epoch:
.filter(DBBinary.package == binary_name)
for binary in binaries:
if binary.source.source != upload.changes.changes['Source']:
- return True, binary, binary.source.source
+ return True, binary.package, binary.source.source
return False, None, None
def _check_acl(self, session, upload, acl):
acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
if acl.allow_per_source:
- # XXX: Drop DMUA part here and switch to new implementation.
- # XXX: Send warning mail once users can set the new DMUA flag
- dmua_status, dmua_reason = self._check_dmua(upload)
if acl_per_source is None:
- if not dmua_status:
- return False, dmua_reason
- else:
- upload.warn('DM flag not set, but accepted as DMUA was set.')
- #if acl_per_source is None:
- # return False, "not allowed to upload source package '{0}'".format(source_name)
+ return False, "not allowed to upload source package '{0}'".format(source_name)
if acl.deny_per_source and acl_per_source is not None:
return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)
return True, None
- def _check_dmua(self, upload):
- # This code is not very nice, but hopefully works until we can replace
- # DM-Upload-Allowed, cf. https://lists.debian.org/debian-project/2012/06/msg00029.html
- session = upload.session
-
- # Check DM-Upload-Allowed
- suites = upload.final_suites
- assert len(suites) == 1
- suite = list(suites)[0]
-
- last_suites = ['unstable', 'experimental']
- if suite.suite_name.endswith('-backports'):
- last_suites = [suite.suite_name]
- last = session.query(DBSource).filter_by(source=upload.changes.changes['Source']) \
- .join(DBSource.suites).filter(Suite.suite_name.in_(last_suites)) \
- .order_by(DBSource.version.desc()).limit(1).first()
- if last is None:
- return False, 'No existing source found in {0}'.format(' or '.join(last_suites))
- if not last.dm_upload_allowed:
- return False, 'DM-Upload-Allowed is not set in {0}={1}'.format(last.source, last.version)
-
- # check current Changed-by is in last Maintainer or Uploaders
- uploader_names = [ u.name for u in last.uploaders ]
- changed_by_field = upload.changes.changes.get('Changed-By', upload.changes.changes['Maintainer'])
- if changed_by_field not in uploader_names:
- return False, '{0} is not an uploader for {1}={2}'.format(changed_by_field, last.source, last.version)
-
- # check Changed-by is the DM
- changed_by = fix_maintainer(changed_by_field)
- uid = upload.fingerprint.uid
- if uid is None:
- return False, 'Unknown uid for fingerprint {0}'.format(upload.fingerprint.fingerprint)
- if uid.uid != changed_by[3] and uid.name != changed_by[2]:
- return False, 'DMs are not allowed to sponsor uploads (expected {0} <{1}> as maintainer, but got {2})'.format(uid.name, uid.uid, changed_by_field)
-
- return True, None
-
def check(self, upload):
session = upload.session
fingerprint = upload.fingerprint
raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
result, reason = self._check_acl(session, upload, acl)
if not result:
- raise Reject(reason)
+ raise RejectACL(acl, reason)
for acl in session.query(ACL).filter_by(is_global=True):
result, reason = self._check_acl(session, upload, acl)
if result == False:
- raise Reject(reason)
+ raise RejectACL(acl, reason)
return True
changespath = os.path.join(upload.directory, changes.filename)
try:
- if cnf.unprivgroup:
- cmd = "sudo -H -u {0} -- /usr/bin/lintian --show-overrides --tags-from-file {1} {2}".format(cnf.unprivgroup, temp_filename, changespath)
- else:
- cmd = "/usr/bin/lintian --show-overrides --tags-from-file {0} {1}".format(temp_filename, changespath)
- result, output = commands.getstatusoutput(cmd)
+ cmd = []
+
+ user = cnf.get('Dinstall::UnprivUser') or None
+ if user is not None:
+ cmd.extend(['sudo', '-H', '-u', user])
+
+ cmd.extend(['LINTIAN_COLL_UNPACKED_SKIP_SIG=1', '/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
+ result, output = commands.getstatusoutput(" ".join(cmd))
finally:
os.unlink(temp_filename)
for arch in upload.changes.architectures:
query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
if query.first() is None:
- raise Reject('Architecture {0} is not allowed in suite {2}'.format(arch, suite.suite_name))
+ raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))
return True
else:
return db_binary.version
- def _version_checks(self, upload, suite, op):
+ def _version_checks(self, upload, suite, other_suite, op, op_name):
session = upload.session
if upload.changes.source is not None:
source_name = upload.changes.source.dsc['Source']
source_version = upload.changes.source.dsc['Version']
- v = self._highest_source_version(session, source_name, suite)
+ v = self._highest_source_version(session, source_name, other_suite)
if v is not None and not op(version_compare(source_version, v)):
- raise Reject('Version check failed (source={0}, version={1}, other-version={2}, suite={3})'.format(source_name, source_version, v, suite.suite_name))
+ raise Reject("Version check failed:\n"
+ "Your upload included the source package {0}, version {1},\n"
+ "however {3} already has version {2}.\n"
+ "Uploads to {5} must have a {4} version than present in {3}."
+ .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))
for binary in upload.changes.binaries:
binary_name = binary.control['Package']
binary_version = binary.control['Version']
architecture = binary.control['Architecture']
- v = self._highest_binary_version(session, binary_name, suite, architecture)
+ v = self._highest_binary_version(session, binary_name, other_suite, architecture)
if v is not None and not op(version_compare(binary_version, v)):
- raise Reject('Version check failed (binary={0}, version={1}, other-version={2}, suite={3})'.format(binary_name, binary_version, v, suite.suite_name))
+ raise Reject("Version check failed:\n"
+ "Your upload included the binary package {0}, version {1}, for {2},\n"
+ "however {4} already has version {3}.\n"
+ "Uploads to {6} must have a {5} version than present in {4}."
+ .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))
def per_suite_check(self, upload, suite):
session = upload.session
must_be_newer_than.append(suite)
for s in must_be_newer_than:
- self._version_checks(upload, s, lambda result: result > 0)
+ self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')
vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
must_be_older_than = [ vc.reference for vc in vc_older ]
for s in must_be_older_than:
- self._version_checks(upload, s, lambda result: result < 0)
+ self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')
return True