1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module providing pre-acceptance tests

Please read the documentation for the L{Check} class for the interface.
"""
from daklib.config import Config
from daklib.dbconn import *
import daklib.dbconn as dbconn
from daklib.regexes import *
from daklib.textutils import fix_maintainer, ParseMaintError
import daklib.lintian as lintian
import daklib.utils as utils

import apt_inst
import apt_pkg
from apt_pkg import version_compare
import commands
import os
import textwrap
import time
import yaml
41 # TODO: replace by subprocess
class Reject(Exception):
    """exception raised by failing checks

    Carries a human-readable explanation of why the upload was rejected.
    """
49 """base class for checks
51 checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
52 raise a L{daklib.checks.Reject} exception including a human-readable
53 description why the upload should be rejected.
55 def check(self, upload):
58 @type upload: L{daklib.archive.ArchiveUpload}
59 @param upload: upload to check
61 @raise daklib.checks.Reject: upload should be rejected
64 def per_suite_check(self, upload, suite):
65 """do per-suite checks
67 @type upload: L{daklib.archive.ArchiveUpload}
68 @param upload: upload to check
70 @type suite: L{daklib.dbconn.Suite}
71 @param suite: suite to check
73 @raise daklib.checks.Reject: upload should be rejected
78 """allow to force ignore failing test
80 C{True} if it is acceptable to force ignoring a failing test,
class SignatureCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check(self, upload):
        changes = upload.changes
        # the .changes itself must carry a valid signature
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        if changes.source is not None:
            # sourceful upload: the .dsc must be signed too, by the same key
            if not changes.source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if changes.source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
        # the signing key must map to a known uid in the database
        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        # Source and Version must parse; their components are reused below
        # to cross-check the filename.
        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # architecture list and actual upload contents must agree on "source"
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
class HashesCheck(Check):
    """Check hashes in .changes and .dsc are valid."""
    def check(self, upload):
        changes = upload.changes
        # verify the files listed in the .changes first, then (for sourceful
        # uploads) those listed in the .dsc; f.check raises on any mismatch
        to_verify = list(changes.files.itervalues())
        source = changes.source
        if source is not None:
            to_verify.extend(source.files.itervalues())
        for f in to_verify:
            f.check(upload.directory)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # every uploaded binary must be announced in the changes' Binary field
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        """check a single binary package

        @raise daklib.checks.Reject: package has a syntax error
        """
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # bug fix: second placeholder was {0}, so the message repeated
                # the filename instead of naming the missing field
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if not match:
            # guard added: a non-matching filename previously caused an
            # AttributeError below instead of a clean rejection
            raise Reject('{0}: Does not match re_file_binary'.format(fn))
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    # Built-Using takes source package relations
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1984'), '%Y'))

        class TarTime(object):
            """collects members whose mtime falls outside the allowed window"""
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                # bug fix: the dicts were referenced as bare names
                # (future_files/past_files) which would raise NameError;
                # they are instance attributes
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += "  {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            # walk the control tarball, recording out-of-range timestamps
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            # orig tarballs carry only the upstream part of the version
            version = re_field_version_upstream.match(version).group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        if upload.changes.source is None:
            # binary-only upload: nothing to do here
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        distributions = upload.changes.distributions
        if len(distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""
    # NOTE(review): several guard bodies and return statements were missing
    # from this chunk; restored from upstream dak — verify against git history.

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """evaluate a single ACL

        @return: C{(result, reason)}: C{(None, None)} if the ACL does not
                 apply, C{(False, reason)} if it denies the upload, and
                 C{(True, None)} if it allows it
        """
        source_name = upload.changes.source_name

        # ACL does not apply to this fingerprint/keyring
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            # XXX: Drop DMUA part here and switch to new implementation.
            # XXX: Send warning mail once users can set the new DMUA flag
            dmua_status, dmua_reason = self._check_dmua(upload)
            if not dmua_status:
                return False, dmua_reason
            #if acl_per_source is None:
            #    return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def _check_dmua(self, upload):
        # This code is not very nice, but hopefully works until we can replace
        # DM-Upload-Allowed, cf. https://lists.debian.org/debian-project/2012/06/msg00029.html
        session = upload.session

        # Check DM-Upload-Allowed
        suites = upload.final_suites
        assert len(suites) == 1
        suite = list(suites)[0]

        last_suites = ['unstable', 'experimental']
        if suite.suite_name.endswith('-backports'):
            last_suites = [suite.suite_name]
        last = session.query(DBSource).filter_by(source=upload.changes.changes['Source']) \
            .join(DBSource.suites).filter(Suite.suite_name.in_(last_suites)) \
            .order_by(DBSource.version.desc()).limit(1).first()
        if last is None:
            return False, 'No existing source found in {0}'.format(' or '.join(last_suites))
        if not last.dm_upload_allowed:
            return False, 'DM-Upload-Allowed is not set in {0}={1}'.format(last.source, last.version)

        # check current Changed-by is in last Maintainer or Uploaders
        uploader_names = [ u.name for u in last.uploaders ]
        changed_by_field = upload.changes.changes.get('Changed-By', upload.changes.changes['Maintainer'])
        if changed_by_field not in uploader_names:
            return False, '{0} is not an uploader for {1}={2}'.format(changed_by_field, last.source, last.version)

        # check Changed-by is the DM
        changed_by = fix_maintainer(changed_by_field)
        uid = upload.fingerprint.uid
        if uid is None:
            return False, 'Unknown uid for fingerprint {0}'.format(upload.fingerprint.fingerprint)
        if uid.uid != changed_by[3] and uid.name != changed_by[2]:
            return False, 'DMs are not allowed to sponsor uploads (expected {0} <{1}> as maintainer, but got {2})'.format(uid.name, uid.uid, changed_by_field)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring
        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if result == False:
            raise Reject(reason)

        # global ACLs apply to every upload on top of the uploader's own ACL
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise Reject(reason)

        return True

    def per_suite_check(self, upload, suite):
        acls = suite.acls
        if len(acls) != 0:
            # at least one of the suite's ACLs must accept the upload;
            # any explicit denial rejects immediately
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result == False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        # only sourceful uploads can be blocked by a transition
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            # the source package the transition is waiting for, and the
            # version it must reach in testing
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """load the release transitions file

        @return: parsed transitions, or C{None} if the file is not
                 configured, missing, or broken
        """
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        contents = file(path, 'r').read()
        try:
            transitions = yaml.load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.
    """
    def check(self, upload):
        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
            # source-only uploads are explicitly permitted by configuration
            return True
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            raise Reject('Source-only uploads are not allowed.')
        return True
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            # no tag list configured: nothing to check against
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # write the interesting tags to a temporary file for lintian
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            for tag in tags:
                # use write() instead of py2-only "print >>" so the file
                # stays parseable by newer interpreters
                temptagfile.write("{0}\n".format(tag))
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)
        try:
            if cnf.unprivgroup:
                cmd = "sudo -H -u {0} -- /usr/bin/lintian --show-overrides --tags-from-file {1} {2}".format(cnf.unprivgroup, temp_filename, changespath)
            else:
                cmd = "/usr/bin/lintian --show-overrides --tags-from-file {0} {1}".format(temp_filename, changespath)
            result, output = commands.getstatusoutput(cmd)
        finally:
            # always remove the temporary tag file, even if lintian blows up
            os.unlink(temp_filename)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        if source is None:
            # binary-only upload: no source format to validate
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    """Check each uploaded architecture is included in the target suite."""
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
            if query.first() is None:
                # bug fix: the placeholder was {2}, but only two arguments are
                # passed to format() — that raised IndexError instead of
                # producing the rejection message
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        """highest version of C{source_name} in C{suite}, or C{None}"""
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            # bug fix: the None branch had no return, so execution fell
            # through and dereferenced db_source.version
            return None
        return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        """highest version of C{binary_name} in C{suite} for 'all' or the
        given architecture, or C{None}"""
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        return db_binary.version

    def _version_checks(self, upload, suite, op):
        """reject unless C{op(version_compare(uploaded, existing))} holds for
        every uploaded source and binary against C{suite}"""
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject('Version check failed (source={0}, version={1}, other-version={2}, suite={3})'.format(source_name, source_version, v, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject('Version check failed (binary={0}, version={1}, other-version={2}, suite={3})'.format(binary_name, binary_version, v, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, s, lambda result: result > 0)

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]

        for s in must_be_older_than:
            self._version_checks(upload, s, lambda result: result < 0)

        return True