1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21 """module provided pre-acceptance tests
23 Please read the documentation for the L{Check} class for the interface.
26 from daklib.config import Config
27 import daklib.daksubprocess
28 from daklib.dbconn import *
29 import daklib.dbconn as dbconn
30 from daklib.regexes import *
31 from daklib.textutils import fix_maintainer, ParseMaintError
32 import daklib.lintian as lintian
33 import daklib.utils as utils
38 from apt_pkg import version_compare
# Validates the encoding of every field in a control mapping; converts the
# low-level UnicodeDecodeError into a user-readable Reject naming the field.
def check_fields_for_valid_utf8(filename, control):
    """Check all fields of a control file for valid UTF-8"""
    for field in control.keys():
            # Field values are byte strings (Python 2); the decode result is
            # discarded — only the validity of the encoding matters here.
            control[field].decode('utf-8')
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))
class Reject(Exception):
    """Raised by a failing check to signal that the upload must be rejected."""
class RejectExternalFilesMismatch(Reject):
    """exception raised by failing the external hashes check"""
        # args[:4] = (filename, attribute name, our value, external value).
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]
class RejectACL(Reject):
    """exception raised by failing ACL checks"""
    # Carries the ACL object that denied the upload and a human-readable reason.
    def __init__(self, acl, reason):
        # Rendered as "ACL <name>: <reason>" in rejection messages.
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
76 """base class for checks
78 checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
79 raise a L{daklib.checks.Reject} exception including a human-readable
80 description why the upload should be rejected.
82 def check(self, upload):
85 @type upload: L{daklib.archive.ArchiveUpload}
86 @param upload: upload to check
88 @raise daklib.checks.Reject: upload should be rejected
91 def per_suite_check(self, upload, suite):
92 """do per-suite checks
94 @type upload: L{daklib.archive.ArchiveUpload}
95 @param upload: upload to check
97 @type suite: L{daklib.dbconn.Suite}
98 @param suite: suite to check
100 @raise daklib.checks.Reject: upload should be rejected
105 """allow to force ignore failing test
107 C{True} if it is acceptable to force ignoring a failing test,
class SignatureAndHashesCheck(Check):
    # Rejects a .changes whose signature was already recorded in
    # SignatureHistory, i.e. an exact re-upload of a previously seen file.
    def check_replay(self, upload):
        # Use private session as we want to remember having seen the .changes
        session = upload.session
        history = SignatureHistory.from_signed_file(upload.changes)
        r = history.query(session)
            raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    def check(self, upload):
        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self.check_replay(upload)
        self._check_hashes(upload, changes.filename, changes.files.itervalues())
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            # The .dsc and the .changes must come from the same uploader key.
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())
        # A valid signature by a key without a known uid is still rejected.
        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")
    """Make sure hashes match existing files

    @type upload: L{daklib.archive.ArchiveUpload}
    @param upload: upload we are processing

    @param filename: name of the file the expected hash values are taken from

    @type files: sequence of L{daklib.upload.HashedFile}
    @param files: files to check the hashes for
    def _check_hashes(self, upload, filename, files):
                f.check(upload.directory)
            except daklib.upload.FileDoesNotExist as e:
                raise Reject('{0}: {1}\n'
                             'Perhaps you need to include the file in your upload?'
                             .format(filename, unicode(e)))
            except daklib.upload.UploadException as e:
                raise Reject('{0}: {1}'.format(filename, unicode(e)))
class SignatureTimestampCheck(Check):
    """Check timestamp of .changes signature"""
    def check(self, upload):
        changes = upload.changes
        # NOTE(review): utcnow() is naive UTC; signature_timestamp is
        # presumably also naive UTC — confirm before mixing aware datetimes.
        now = datetime.datetime.utcnow()
        timestamp = changes.signature_timestamp
        age = now - timestamp
        # Accepted window: signature at most 365 days old and at most
        # 7 days in the future (negative age).
        age_max = datetime.timedelta(days=365)
        age_min = datetime.timedelta(days=-7)
            raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, age_max.days))
            raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(age_min.days)))
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename
        # Every one of these fields must be present in a valid .changes file.
        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))
        check_fields_for_valid_utf8(fn, control)
        # Syntax of the Source and Version fields.
        source_match = re_field_source.match(control['Source'])
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')
        # The filename must agree with Source and Version (the epoch is
        # never part of the filename).
        match = re_file_changes.match(fn)
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))
        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))
        # The Architecture field and the actual upload contents must agree
        # about whether source is included.
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))
            # Changed-By is optional, but must parse when present.
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))
        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")
        # The Closes field may only contain bug numbers.
        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        # NOTE(review): f.filename is interpolated directly into the SQL
        # string — safe only if filenames cannot contain quotes/LIKE
        # metacharacters; consider a bound parameter instead.
        q = session.execute("SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE '%%/%s'" % f.filename)
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)
        # Any mismatching attribute rejects the upload with details of both sides.
        if ext_size != f.size:
            raise RejectExternalFilesMismatch(f.filename, 'size', f.size, ext_size)
        if ext_md5sum != f.md5sum:
            raise RejectExternalFilesMismatch(f.filename, 'md5sum', f.md5sum, ext_md5sum)
        if ext_sha1sum != f.sha1sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)
        if ext_sha256sum != f.sha256sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)
    def check(self, upload):
        # Feature-gated: only runs when the external files db is enabled.
        if not cnf.use_extfiles:
        session = upload.session
        changes = upload.changes
        # Check the files listed in the .changes and, when source is
        # included, the files listed in the .dsc as well.
        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        """check all binary packages of an upload

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: a binary package has syntax errors
        """
        # Validate each binary package on its own first.
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # Every uploaded binary package must also be listed in the
        # Binary field of the .changes file.
        binary_names = set(binary.control['Package'] for binary in upload.changes.binaries)
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        """syntax-check a single binary package

        @type upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload being processed

        @param binary: binary package (control file + hashed file) to check

        @raise daklib.checks.Reject: binary package has syntax errors
        """
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
            if field not in control:
                # Fixed: the message used '{0}' twice, printing the filename
                # in place of the missing field name.
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if not match:
            # Guard added: a non-matching filename used to crash with
            # AttributeError below instead of producing a rejection.
            raise Reject('{0}: does not match re_file_binary'.format(fn))
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        # "Multi-Arch: no" breaks wanna-build, #768353
        multi_arch = control.get("Multi-Arch")
        if multi_arch == 'no':
            raise Reject('{0}: Multi-Arch: no support in Debian is broken (#768353)'.format(fn))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    def check(self, upload):
        # Both cutoffs are configurable; defaults: 24h grace into the
        # future, nothing dated before 1975.
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))
        # Accumulates offending tar members while walking the archive.
        class TarTime(object):
                self.future_files = dict()
                self.past_files = dict()
            # Invoked by apt_inst for every archive member.
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime
        # Builds the multi-line rejection message listing each offender.
        def format_reason(filename, direction, files):
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += " {0} ({1})".format(fn, time.ctime(ts))
        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            # NOTE(review): only the control tarball is walked here.
            deb.control.go(tar.callback)
                raise Reject(format_reason(filename, 'future', tar.future_files))
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    # Ensures a source file's name matches the package/version recorded in
    # the .dsc control fields.
    def check_filename(self, control, filename, regex):
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        match = re_file_orig.match(filename)
            match = regex.match(filename)
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))
        version = control['Version']
            # Orig tarballs carry only the upstream part of the version.
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                raise Reject('{0}: Source package includes upstream tarball, but {0} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))
    def check(self, upload):
        # Nothing to do for binary-only uploads.
        if upload.changes.source is None:
        changes = upload.changes.changes
        source = upload.changes.source
        dsc_fn = source._dsc_file.filename
        check_fields_for_valid_utf8(dsc_fn, control)
        # Source and Version in the .dsc must agree with the .changes file.
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))
        # Filename consistency for the .dsc itself and every referenced file.
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)
        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))
        # Delegate the per-file checks (sizes, hashes, components) to utils.
        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
            raise Reject("\n".join(rejects))
class SingleDistributionCheck(Check):
    """Ensure the .changes file targets exactly one distribution."""
    def check(self, upload):
        distribution_count = len(upload.changes.distributions)
        if distribution_count != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""
    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                # Same binary name, different source package => hijack.
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None
    # Evaluates one ACL; returns a (result, reason) tuple where reason
    # explains a denial.
    def _check_acl(self, session, upload, acl):
        source_name = upload.changes.source_name
        # ACLs can be scoped to a fingerprint or a keyring.
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
        if not acl.allow_new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                # 'source' is not a binary architecture; ignore it here.
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)
        # Per-source entries either whitelist (allow_per_source) or
        # blacklist (deny_per_source) specific source packages.
        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)
    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))
        # A fingerprint-specific ACL overrides the keyring's default ACL.
        acl = fingerprint.acl or keyring.acl
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
            raise RejectACL(acl, reason)
        # Global ACLs are evaluated for every upload on top of the key's own.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
                raise RejectACL(acl, reason)
    def per_suite_check(self, upload, suite):
            result, reason = self._check_acl(upload.session, upload, acl)
            # At least one per-suite ACL has to accept the upload.
            accept = accept or result
            raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        # Transitions only affect sourceful uploads.
        if 'source' not in upload.changes.architectures:
        transitions = self.get_transitions()
        if transitions is None:
        session = upload.session
        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')
        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)
            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet
                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.
                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)
                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                        currentlymsg = "not present in testing"
                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])
                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected,t["rm"])))
                    raise Reject(rejectmsg)
    # Loads the release team's transition definitions; None disables the check.
    def get_transitions(self):
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
        # NOTE(review): file handle is never closed explicitly.
        contents = file(path, 'r').read()
            transitions = yaml.safe_load(contents)
        except yaml.YAMLError as msg:
            # Broken transitions file disables the check instead of failing uploads.
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))
class NoSourceOnlyCheck(Check):
    # An upload is source-only when it carries a source package and no
    # binary packages at all.
    def is_source_only_upload(self, upload):
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.

    Source-only uploads are only accepted for source packages having a
    Package-List field that also lists architectures per package. This
    check can be disabled via
    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.

    Source-only uploads to NEW are only allowed if
    Dinstall::AllowSourceOnlyNew is set.

    Uploads not including architecture-independent packages are only
    allowed if Dinstall::AllowNoArchIndepUploads is set.
    def check(self, upload):
        if not self.is_source_only_upload(upload):
        # All four policy switches come from the dak configuration.
        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads')
        changes = upload.changes
        if not allow_source_only_uploads:
            raise Reject('Source-only uploads are not allowed.')
        # package_list.fallback means the .dsc had no usable Package-List field.
        if not allow_source_only_uploads_without_package_list \
                and changes.source.package_list.fallback:
            raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
        if not allow_source_only_new and upload.new:
            raise Reject('Source-only uploads to NEW are not allowed.')
        if not allow_no_arch_indep_uploads \
                and 'all' not in changes.architectures \
                and 'experimental' not in changes.distributions \
                and changes.source.package_list.has_arch_indep_packages():
            raise Reject('Uploads not including architecture-independent packages are not allowed.')
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes
        # Only check sourceful uploads.
        if changes.source is None:
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
        # Without a configured tag list there is nothing to enforce.
        if 'Dinstall::LintianTags' not in cnf:
        tagfile = cnf['Dinstall::LintianTags']
        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))
        # Write the tags of interest to a temp file handed to lintian.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
                print >>temptagfile, tag
        changespath = os.path.join(upload.directory, changes.filename)
        # Optionally drop privileges via sudo before invoking lintian.
        user = cnf.get('Dinstall::UnprivUser') or None
            cmd.extend(['sudo', '-H', '-u', user])
            cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
            output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            # Non-zero exit is inspected below; lintian uses it for findings too.
            result = e.returncode
            os.unlink(temp_filename)
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))
        # Turn lintian's output into rejection messages for the configured tags.
        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        # The suite must explicitly allow the .dsc's Format (e.g. "3.0 (quilt)").
        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    """Reject uploads naming an architecture the target suite does not carry."""
    def per_suite_check(self, upload, suite):
        db_session = upload.session
        for architecture in upload.changes.architectures:
            candidates = db_session.query(Architecture) \
                .filter_by(arch_string=architecture) \
                .filter(Architecture.suites.contains(suite))
            if candidates.first() is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(architecture, suite.suite_name))
class VersionCheck(Check):
    """Check version constraints"""
    # Highest version of source package `source_name` present in `suite`.
    def _highest_source_version(self, session, source_name, suite):
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
        return db_source.version
    # Highest version of binary package `binary_name` in `suite`;
    # arch:all binaries count for every architecture, hence the in_().
    def _highest_binary_version(self, session, binary_name, suite, architecture):
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
        return db_binary.version
    # Compares every uploaded source/binary version against `other_suite`
    # using `op` (a predicate over apt's version_compare result); `op_name`
    # ("higher"/"lower") is only used in the rejection message.
    def _version_checks(self, upload, suite, other_suite, op, op_name):
        session = upload.session
        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))
        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))
    def per_suite_check(self, upload, suite):
        session = upload.session
        # Database-configured constraints for this suite.
        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)
        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')
        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]
        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')