# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# Parts based on code that is
# Copyright (C) 2001-2006, James Troup <james@nocrew.org>
# Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module providing pre-acceptance tests

Please read the documentation for the L{Check} class for the interface.
"""

from daklib.config import Config
from daklib.dbconn import *
import daklib.dbconn as dbconn
from daklib.regexes import *
from daklib.textutils import fix_maintainer, ParseMaintError
import daklib.lintian as lintian
import daklib.utils as utils

import apt_inst
import apt_pkg
from apt_pkg import version_compare
import os
import textwrap
import time
import yaml

# TODO: replace by subprocess
import commands
class Reject(Exception):
    """exception raised by failing checks"""

class Check(object):
    """base class for checks

    checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
    raise a L{daklib.checks.Reject} exception including a human-readable
    description of why the upload should be rejected.
    """
    def check(self, upload):
        """do checks

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError
    def per_suite_check(self, upload, suite):
        """do per-suite checks

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @type suite: L{daklib.dbconn.Suite}
        @param suite: suite to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError
    @property
    def forcable(self):
        """allow to force ignore failing test

        C{True} if it is acceptable to force ignoring a failing test,
        C{False} otherwise
        """
        return False
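
# An illustrative sketch (not part of dak) of how the Check interface is meant
# to be used; FooFieldCheck and its message are hypothetical:
#
#   class FooFieldCheck(Check):
#       """reject uploads whose .changes lacks a Foo field (example only)"""
#       def check(self, upload):
#           if 'Foo' not in upload.changes.changes:
#               raise Reject('{0}: missing Foo field'.format(upload.changes.filename))
#           return True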

class SignatureCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check(self, upload):
        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        if changes.source is not None:
            if not changes.source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if changes.source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True

class HashesCheck(Check):
    """Check hashes in .changes and .dsc are valid."""
    def check(self, upload):
        changes = upload.changes
        for f in changes.files.itervalues():
            f.check(upload.directory)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                f.check(upload.directory)
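        # Note: HashedFile.check() (daklib.upload) is relied upon to raise an
        # error when a listed file is missing from the upload directory or its
        # size/checksums do not match; this is an assumption based on how it
        # is used here.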

class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax
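        # The checks below rely on apt_pkg raising an exception for
        # relationship syntax it cannot parse; the parsed result is discarded.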
        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files with timestamps too far in the future cause ugly warnings and
    extreme time travel can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1984'), '%Y'))
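        # Anything newer than "now + grace period" or older than the start of
        # the configured cutoff year is rejected below.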

        class TarTime(object):
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += "  {0} ({1})\n".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))

class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        # In case we have an .orig.tar.*, we have to strip the Debian revision
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            version = re_field_version_upstream.match(version).group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        if upload.changes.source is None:
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        # TODO: check all expected files for given source format are included

class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        if len(upload.changes.distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")

class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""
    def _check_dm(self, upload):
        # This code is not very nice, but hopefully works until we can replace
        # DM-Upload-Allowed, cf. https://lists.debian.org/debian-project/2012/06/msg00029.html
        session = upload.session

        if 'source' not in upload.changes.architectures:
            raise Reject('DM uploads must include source')
        for f in upload.changes.files.itervalues():
            if f.section == 'byhand' or f.section[:4] == "raw-":
                raise Reject("Uploading byhand packages is not allowed for DMs.")

        # Reject NEW packages
        distributions = upload.changes.distributions
        assert len(distributions) == 1
        suite = session.query(Suite).filter_by(suite_name=distributions[0]).one()
        overridesuite = suite
        if suite.overridesuite is not None:
            overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
        if upload._check_new(overridesuite):
            raise Reject('Uploading NEW packages is not allowed for DMs.')

        # Check DM-Upload-Allowed
        last_suites = ['unstable', 'experimental']
        if suite.suite_name.endswith('-backports'):
            last_suites = [suite.suite_name]
        last = session.query(DBSource).filter_by(source=upload.changes.changes['Source']) \
            .join(DBSource.suites).filter(Suite.suite_name.in_(last_suites)) \
            .order_by(DBSource.version.desc()).limit(1).first()
        if last is None:
            raise Reject('No existing source found in {0}'.format(' or '.join(last_suites)))
        if not last.dm_upload_allowed:
            raise Reject('DM-Upload-Allowed is not set in {0}={1}'.format(last.source, last.version))

        # check current Changed-by is in last Maintainer or Uploaders
        uploader_names = [ u.name for u in last.uploaders ]
        changed_by_field = upload.changes.changes.get('Changed-By', upload.changes.changes['Maintainer'])
        if changed_by_field not in uploader_names:
            raise Reject('{0} is not an uploader for {1}={2}'.format(changed_by_field, last.source, last.version))

        # check Changed-by is the DM
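        # fix_maintainer() returns a tuple; elements [2] and [3] are taken to
        # be the bare name and e-mail address (cf. daklib.textutils).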
        changed_by = fix_maintainer(changed_by_field)
        uid = upload.fingerprint.uid
        if uid is None:
            raise Reject('Unknown uid for fingerprint {0}'.format(upload.fingerprint.fingerprint))
        if uid.uid != changed_by[3] and uid.name != changed_by[2]:
            raise Reject('DMs are not allowed to sponsor uploads (expected {0} <{1}> as maintainer, but got {2})'.format(uid.name, uid.uid, changed_by_field))

        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .join(DBBinary.suites).filter(Suite.suite_name.in_(upload.changes.distributions)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    raise Reject('DMs must not hijack binaries (binary={0}, other-source={1})'.format(binary_name, binary.source.source))

        return True

    def check(self, upload):
        fingerprint = upload.fingerprint
        source_acl = fingerprint.source_acl
        if source_acl is None:
            if 'source' in upload.changes.architectures:
                raise Reject('Fingerprint {0} must not upload source'.format(fingerprint.fingerprint))
        elif source_acl.access_level == 'dm':
            self._check_dm(upload)
        elif source_acl.access_level != 'full':
            raise Reject('Unknown source_acl access level {0} for fingerprint {1}'.format(source_acl.access_level, fingerprint.fingerprint))

        bin_architectures = set(upload.changes.architectures)
        bin_architectures.discard('source')
        binary_acl = fingerprint.binary_acl
        if binary_acl is None:
            if len(bin_architectures) > 0:
                raise Reject('Fingerprint {0} must not upload binary packages'.format(fingerprint.fingerprint))
        elif binary_acl.access_level == 'map':
            query = upload.session.query(BinaryACLMap).filter_by(fingerprint=fingerprint)
            allowed_architectures = [ m.architecture.arch_string for m in query ]

            for arch in upload.changes.architectures:
                if arch not in allowed_architectures:
                    raise Reject('Fingerprint {0} must not upload binaries for architecture {1}'.format(fingerprint.fingerprint, arch))
        elif binary_acl.access_level != 'full':
            raise Reject('Unknown binary_acl access level {0} for fingerprint {1}'.format(binary_acl.access_level, fingerprint.fingerprint))

        return True

class UploadBlockCheck(Check):
    """check for upload blocks"""
    def check(self, upload):
        session = upload.session
        control = upload.changes.changes

        source = re_field_source.match(control['Source']).group('package')
        version = control['Version']
        blocks = session.query(UploadBlock).filter_by(source=source) \
            .filter((UploadBlock.version == version) | (UploadBlock.version == None))
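        # A block entry with a NULL version applies to every version of the
        # source package (see the filter above).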
        for block in blocks:
            if block.fingerprint == upload.fingerprint:
                raise Reject('Manual upload block in place for package {0} and fingerprint {1}:\n{2}'.format(source, upload.fingerprint.fingerprint, block.reason))
            if block.uid == upload.fingerprint.uid:
                raise Reject('Manual upload block in place for package {0} and uid {1}:\n{2}'.format(source, block.uid.uid, block.reason))

        return True

class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session
        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        contents = file(path, 'r').read()
        try:
            transitions = yaml.load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
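
    # The transitions file is expected to look roughly like the following
    # (illustrative sketch only; the key names are those used in check()):
    #
    #   libfoo-transition:
    #     source: libfoo
    #     new: 1.2-1
    #     rm: Some Release Team Member
    #     reason: "libfoo soname bump"
    #     packages:
    #       - bar
    #       - baz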

class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.
    """
    def check(self, upload):
        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
            return True
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            raise Reject('Source-only uploads are not allowed.')
        return True

class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))
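        # The tags file is assumed to map 'lintian' to named groups of tag
        # names; the group names below are illustrative only:
        #
        #   lintian:
        #     nonfatal:
        #       - some-tag
        #     fatal:
        #       - some-other-tag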

        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            for tag in tags:
                print >>temptagfile, tag
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)
        try:
            cmd = "lintian --show-overrides --tags-from-file {0} {1}".format(temp_filename, changespath)
            result, output = commands.getstatusoutput(cmd)
        finally:
            os.unlink(temp_filename)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))
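        # (lintian exits with status 2 when it fails internally rather than
        # when it finds policy violations; such failures are only warned about
        # and the available output is still parsed below.)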

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True

class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        if source is None:
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))

class SuiteArchitectureCheck(Check):
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
            if query.first() is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True

class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        return db_binary.version

    def _version_checks(self, upload, suite, op):
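        # op receives the result of version_compare(upload version, archive
        # version) and must return True when the relation is acceptable
        # (e.g. result > 0 for "must be newer than").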
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject('Version check failed (source={0}, version={1}, other-version={2}, suite={3})'.format(source_name, source_version, v, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject('Version check failed (binary={0}, version={1}, other-version={2}, suite={3})'.format(binary_name, binary_version, v, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, s, lambda result: result > 0)

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]

        for s in must_be_older_than:
            self._version_checks(upload, s, lambda result: result < 0)