1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21 """module provided pre-acceptance tests
23 Please read the documentation for the `Check` class for the interface.
26 from daklib.config import Config
28 import daklib.dbconn as dbconn
29 from .regexes import *
30 from .textutils import fix_maintainer, ParseMaintError
31 import daklib.lintian as lintian
32 import daklib.utils as utils
35 from apt_pkg import version_compare
39 # TODO: replace by subprocess
class Reject(Exception):
    """Raised by a check to indicate that an upload must be rejected.

    The exception message is a human-readable explanation of the rejection.
    """
47 """base class for checks
49 checks are called by daklib.archive.ArchiveUpload. Failing tests should
50 raise a `daklib.checks.Reject` exception including a human-readable
51 description why the upload should be rejected.
    def check(self, upload):
        """Perform upload-wide checks (overridden by subclasses).

        Args:
           upload (daklib.archive.ArchiveUpload): upload to check

        Raises:
           Reject: if the upload must be rejected
        """
    def per_suite_check(self, upload, suite):
        """do per-suite checks (overridden by subclasses)

        Args:
           upload (daklib.archive.ArchiveUpload): upload to check
           suite (daklib.dbconn.Suite): suite to check

        Raises:
           Reject: if the upload must be rejected for this suite
        """
76 """allow to force ignore failing test
78 True if it is acceptable to force ignoring a failing test,
class SignatureCheck(Check):
    """Verify the OpenPGP signatures on .changes (and .dsc, when included).

    The signature must be valid and made by a key known to us.
    """
    def check(self, upload):
        changes = upload.changes

        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")

        source = changes.source
        if source is not None:
            # A sourceful upload carries a .dsc with its own signature; both
            # documents must be signed, and by the same key.
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")

        fingerprint = upload.fingerprint
        if fingerprint is None or fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        """Validate mandatory fields, filename consistency and field syntax.

        Raises:
           Reject: on any syntax or consistency error in the .changes file.
        """
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        source_match = re_field_source.match(control['Source'])
        # BUG FIX: these raises must be guarded; the "if not ..." conditions
        # were missing, so the rejections fired unconditionally.
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        # Filename must agree with the Source and Version fields.
        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # Architecture list and actual upload contents must agree on "source".
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        # BUG FIX: the "try:" lines for these two handlers were missing,
        # leaving dangling "except" clauses.
        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
class HashesCheck(Check):
    """Verify the checksums in .changes and .dsc against the actual files."""
    def check(self, upload):
        changes = upload.changes
        directory = upload.directory

        # Each HashedFile raises on a size or checksum mismatch.
        for hashed_file in changes.files.itervalues():
            hashed_file.check(directory)

        dsc = changes.source
        if dsc is not None:
            for hashed_file in dsc.files.itervalues():
                hashed_file.check(directory)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        """Check every binary and cross-check against the Binary field.

        Raises:
           Reject: if a binary is invalid or not listed in the changes file.
        """
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # Every package name found in an actual .deb must also be listed in
        # the Binary field of the .changes file.
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        """Check a single binary package's control fields and filename."""
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # BUG FIX: second placeholder was '{0}', repeating the
                # filename instead of naming the missing field.
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        # NOTE(review): assumes fn already matches re_file_binary; a
        # non-matching filename would raise AttributeError below — confirm
        # filenames are validated before this check runs.
        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                # BUG FIX: restore the missing try/except around the parse
                # call; narrowed from a bare except to "except Exception".
                try:
                    apt_pkg.parse_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        """Check that `filename` agrees with the Source and Version fields.

        Raises:
           Reject: if the filename does not match the control fields.
        """
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        # BUG FIX: the is_orig bookkeeping and the "if not match" fallbacks
        # were missing, leaving the later version comparison incoherent.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)
        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            # Compare against the upstream part only for orig tarballs.
            version = re_field_version_upstream.match(version).group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        """Check the .dsc fields and all source filenames.

        Raises:
           Reject: on any syntax or consistency error in the source package.
        """
        # Binary-only uploads carry no source to check.
        if upload.changes.source is None:
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        # BUG FIX: "control" was used below but never assigned; it is the
        # .dsc control dict.
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        # The .dsc must agree with the .changes on source name and version.
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                # BUG FIX: restore the missing "try:" for this handler.
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        # TODO: check all expected files for given source format are included
301 # TODO: check all expected files for given source format are included
class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        distributions = upload.changes.distributions
        if len(distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _check_dm(self, upload):
        """Additional restrictions for uploads by Debian Maintainers.

        Raises:
           Reject: if the upload is not allowed under the DM policy.
        """
        # This code is not very nice, but hopefully works until we can replace
        # DM-Upload-Allowed, cf. https://lists.debian.org/debian-project/2012/06/msg00029.html
        session = upload.session

        # DM uploads must be sourceful and must not contain byhand files.
        if 'source' not in upload.changes.architectures:
            raise Reject('DM uploads must include source')
        for f in upload.changes.files.itervalues():
            if f.section == 'byhand' or f.section[:4] == "raw-":
                raise Reject("Uploading byhand packages is not allowed for DMs.")

        # Reject NEW packages
        distributions = upload.changes.distributions
        assert len(distributions) == 1
        suite = session.query(Suite).filter_by(suite_name=distributions[0]).one()
        overridesuite = suite
        if suite.overridesuite is not None:
            overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
        if upload._check_new(overridesuite):
            raise Reject('Uploading NEW packages is not allowed for DMs.')

        # Check DM-Upload-Allowed
        last_suites = ['unstable', 'experimental']
        if suite.suite_name.endswith('-backports'):
            last_suites = [suite.suite_name]
        last = session.query(DBSource).filter_by(source=upload.changes.changes['Source']) \
            .join(DBSource.suites).filter(Suite.suite_name.in_(last_suites)) \
            .order_by(DBSource.version.desc()).limit(1).first()
        # BUG FIX: this rejection must only fire when no earlier source
        # exists; the "if last is None:" guard was missing, so the raise
        # was unconditional.
        if last is None:
            raise Reject('No existing source found in {0}'.format(' or '.join(last_suites)))
        if not last.dm_upload_allowed:
            raise Reject('DM-Upload-Allowed is not set in {0}={1}'.format(last.source, last.version))

        # check current Changed-by is in last Maintainer or Uploaders
        uploader_names = [ u.name for u in last.uploaders ]
        changed_by_field = upload.changes.changes.get('Changed-By', upload.changes.changes['Maintainer'])
        if changed_by_field not in uploader_names:
            raise Reject('{0} is not an uploader for {1}={2}'.format(changed_by_field, last.source, last.version))

        # check Changed-by is the DM
        changed_by = fix_maintainer(changed_by_field)
        uid = upload.fingerprint.uid
        # BUG FIX: restore the missing "if uid is None:" guard; the message
        # only makes sense for an unknown (None) uid.
        if uid is None:
            raise Reject('Unknown uid for fingerprint {0}'.format(upload.fingerprint.fingerprint))
        if uid.uid != changed_by[3] and uid.name != changed_by[2]:
            raise Reject('DMs are not allowed to sponsor uploads (expected {0} <{1}> as maintainer, but got {2})'.format(uid.name, uid.uid, changed_by_field))

        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .join(DBBinary.suites).filter(Suite.suite_name.in_(upload.changes.distributions)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    raise Reject('DMs must not hijack binaries (binary={0}, other-source={1})'.format(binary_name, binary.source.source))

    def check(self, upload):
        """Check source and binary ACLs for the upload's fingerprint.

        Raises:
           Reject: if the signing key may not upload source or binaries.
        """
        fingerprint = upload.fingerprint
        source_acl = fingerprint.source_acl
        if source_acl is None:
            if 'source' in upload.changes.architectures:
                raise Reject('Fingerprint {0} must not upload source'.format(fingerprint.fingerprint))
        elif source_acl.access_level == 'dm':
            self._check_dm(upload)
        elif source_acl.access_level != 'full':
            raise Reject('Unknown source_acl access level {0} for fingerprint {1}'.format(source_acl.access_level, fingerprint.fingerprint))

        bin_architectures = set(upload.changes.architectures)
        bin_architectures.discard('source')
        binary_acl = fingerprint.binary_acl
        if binary_acl is None:
            if len(bin_architectures) > 0:
                raise Reject('Fingerprint {0} must not upload binary packages'.format(fingerprint.fingerprint))
        elif binary_acl.access_level == 'map':
            query = upload.session.query(BinaryACLMap).filter_by(fingerprint=fingerprint)
            allowed_architectures = [ m.architecture.arch_string for m in query ]

            # NOTE(review): this iterates all architectures (including
            # 'source'), not bin_architectures — confirm that is intended.
            for arch in upload.changes.architectures:
                if arch not in allowed_architectures:
                    raise Reject('Fingerprint {0} must not upload binaries for architecture {1}'.format(fingerprint.fingerprint, arch))
        elif binary_acl.access_level != 'full':
            raise Reject('Unknown binary_acl access level {0} for fingerprint {1}'.format(binary_acl.access_level, fingerprint.fingerprint))
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.
    """
    def check(self, upload):
        # Archive-wide opt-in: skip the check entirely when allowed.
        # BUG FIX: the guard had no body; the early "return True" was missing.
        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
            return True
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            raise Reject('Source-only uploads are not allowed.')
        return True
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        """Run lintian on the upload and reject on configured fatal tags.

        Raises:
           Reject: if lintian reports any of the configured tags.
        """
        changes = upload.changes

        # Only check sourceful uploads.
        # BUG FIX: the early "return True" bodies of these guards were
        # missing, leaving dangling conditionals.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        # BUG FIX: "cnf" was used but never assigned.
        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            # NOTE(review): yaml.load on a dak-controlled config file;
            # consider yaml.safe_load anyway to avoid object construction.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # Write all configured tags to a temporary file for --tags-from-file.
        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            # BUG FIX: the inner "for tag in tags:" loop was missing.
            for tag in tags:
                print >>temptagfile, tag
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)

        # TODO: replace by subprocess
        cmd = "lintian --show-overrides --tags-from-file {0} {1}".format(temp_filename, changespath)
        result, output = commands.getstatusoutput(cmd)

        os.unlink(temp_filename)

        # lintian exits 2 on internal failure; warn but still parse output.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        """Reject if the source format is not accepted by `suite`.

        Raises:
           Reject: if the format is not allowed in the suite.
        """
        source = upload.changes.source
        session = upload.session
        # BUG FIX: binary-only uploads have no source; without this guard
        # "source.dsc" below raises AttributeError.
        if source is None:
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    """Check every architecture in the upload is valid for the target suite."""
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
            if query.first() is None:
                # BUG FIX: the placeholder was '{2}' with only two format
                # arguments, which raises IndexError instead of the
                # intended Reject.
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True
490 class VersionCheck(Check):
491 """Check version constraints"""
492 def _highest_source_version(self, session, source_name, suite):
493 db_source = session.query(DBSource).filter_by(source=source_name) \
494 .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
495 if db_source is None:
498 return db_source.version
500 def _highest_binary_version(self, session, binary_name, suite, architecture):
501 db_binary = session.query(DBBinary).filter_by(package=binary_name) \
502 .filter(DBBinary.suites.contains(suite)) \
503 .filter(Architecture.arch_string.in_(['all', architecture])) \
504 .order_by(DBBinary.version.desc()).first()
505 if db_binary is None:
508 return db_binary.version
510 def _version_checks(self, upload, suite, op):
511 session = upload.session
513 if upload.changes.source is not None:
514 source_name = upload.changes.source.dsc['Source']
515 source_version = upload.changes.source.dsc['Version']
516 v = self._highest_source_version(session, source_name, suite)
517 if v is not None and not op(version_compare(source_version, v)):
518 raise Reject('Version check failed (source={0}, version={1}, other-version={2}, suite={3})'.format(source_name, source_version, v, suite.suite_name))
520 for binary in upload.changes.binaries:
521 binary_name = binary.control['Package']
522 binary_version = binary.control['Version']
523 architecture = binary.control['Architecture']
524 v = self._highest_binary_version(session, binary_name, suite, architecture)
525 if v is not None and not op(version_compare(binary_version, v)):
526 raise Reject('Version check failed (binary={0}, version={1}, other-version={2}, suite={3})'.format(binary_name, binary_version, v, suite.suite_name))
528 def per_suite_check(self, upload, suite):
529 session = upload.session
531 vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
532 .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
533 must_be_newer_than = [ vc.reference for vc in vc_newer ]
534 # Must be newer than old versions in `suite`
535 must_be_newer_than.append(suite)
537 for s in must_be_newer_than:
538 self._version_checks(upload, s, lambda result: result > 0)
540 vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
541 must_be_older_than = [ vc.reference for vc in vc_older ]
543 for s in must_be_older_than:
544 self._version_checks(upload, s, lambda result: result < 0)