1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module providing pre-acceptance tests

Please read the documentation for the L{Check} class for the interface.
"""
26 from daklib.config import Config
27 from daklib.dbconn import *
28 import daklib.dbconn as dbconn
29 from daklib.regexes import *
30 from daklib.textutils import fix_maintainer, ParseMaintError
31 import daklib.lintian as lintian
32 import daklib.utils as utils
33 from daklib.upload import InvalidHashException
37 from apt_pkg import version_compare
43 # TODO: replace by subprocess
class Reject(Exception):
    """Exception raised by a failing check.

    The message passed to the constructor is shown to the uploader.
    """
class RejectStupidMaintainerException(Exception):
    """Exception raised by a failing external hashes check.

    Constructed with (filename, hash name, current value, external value).
    """

    def __str__(self):
        # self.args carries the four values given to the constructor.
        template = "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])"
        return template % self.args[:4]
class RejectACL(Reject):
    """Exception raised by failing ACL checks."""

    def __init__(self, acl, reason):
        # Keep both pieces so callers can report which ACL rejected and why.
        self.acl = acl
        self.reason = reason

    def __str__(self):
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
class Check(object):
    """base class for checks

    checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
    raise a L{daklib.checks.Reject} exception including a human-readable
    description why the upload should be rejected.
    """
    def check(self, upload):
        """do checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        # NotImplementedError is the correct abstract-method sentinel;
        # `raise NotImplemented` would raise the non-exception constant
        # and fail with a confusing TypeError instead.
        raise NotImplementedError

    def per_suite_check(self, upload, suite):
        """do per-suite checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError

    @property
    def forcable(self):
        """allow to force ignore failing test

        C{True} if it is acceptable to force ignoring a failing test,
        C{False} otherwise
        """
        return False
class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check(self, upload):
        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self._check_hashes(upload, changes.filename, changes.files.itervalues())

        source = None
        try:
            # Accessing .source parses the .dsc; any parse failure is an
            # invalid upload, not an internal error.
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            # .changes and .dsc must come from the same signer.
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())

        # A valid signature by a key without a known uid is still unknown.
        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

    def _check_hashes(self, upload, filename, files):
        """Make sure hashes match existing files

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload we are processing

        @type  filename: str
        @param filename: name of the file the expected hash values are taken from

        @type  files: sequence of L{daklib.upload.HashedFile}
        @param files: files to check the hashes for

        @raise Reject: a referenced file is missing or has wrong hashes
        """
        try:
            for f in files:
                f.check(upload.directory)
        except IOError as e:
            if e.errno == errno.ENOENT:
                raise Reject('{0} refers to non-existing file: {1}\n'
                             'Perhaps you need to include it in your upload?'
                             .format(filename, os.path.basename(e.filename)))
            # Any other I/O error is unexpected; let it propagate.
            raise
        except InvalidHashException as e:
            raise Reject('{0}: {1}'.format(filename, unicode(e)))
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        # The .changes filename must agree with the Source and Version
        # fields (epoch is never part of the filename).
        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # Architecture list and actually included files must agree on
        # whether this is a sourceful upload.
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            # Changed-By is optional; only validate it when present.
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        """Compare one file's size and checksums with the external files db.

        @param session: database session to query

        @type  f: L{daklib.upload.HashedFile}
        @param f: file to compare

        @raise RejectStupidMaintainerException: a recorded value differs
        """
        # Use a bound parameter instead of %-interpolating the filename
        # into the SQL text: upload filenames are untrusted input.
        q = session.execute(
            "SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern",
            {'pattern': '%/' + f.filename})
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        if not ext_size:
            # Not recorded in the external db: nothing to compare against.
            return

        if ext_size != f.size:
            raise RejectStupidMaintainerException(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectStupidMaintainerException(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectStupidMaintainerException(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectStupidMaintainerException(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        cnf = Config()

        # The whole check is optional and off unless configured.
        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # Every included binary must be listed in the changes' Binary field.
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        """Syntax-check a single binary package.

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload being processed

        @param binary: binary package (control data plus hashed file)

        @raise Reject: the package has a syntax error
        """
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # Bug fix: the message previously used {0} twice and
                # printed the filename instead of the missing field name.
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except Exception:
                    # Narrowed from a bare except: do not swallow
                    # KeyboardInterrupt/SystemExit.
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        # Anything newer than now + grace, or older than the cutoff year,
        # is rejected.
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        class TarTime(object):
            # Collector for tar members outside the allowed time window;
            # closes over the cutoffs computed above.
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            # Build one human-readable reject message per direction.
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += "  {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            # NOTE(review): only the control tarball is walked here —
            # confirm whether data.tar should be checked as well.
            deb.control.go(tar.callback)
            if len(tar.future_files) > 0:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if len(tar.past_files) > 0:
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        """Check that a source filename matches the package name and version.

        @param control: .dsc control data (Source and Version are read)

        @type  filename: str
        @param filename: filename to validate

        @param regex: regular expression the filename must match when it
                      is not an .orig tarball

        @raise Reject: the filename does not match the source metadata
        """
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                # Bug fix: the message previously used {0} twice and never
                # showed the version it was passed.
                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        if upload.changes.source is None:
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        distributions = upload.changes.distributions
        if len(distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                # Same binary name built from a different source package
                # in the target suite counts as a hijack.
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """Evaluate one ACL against the upload.

        Returns (result, reason): result is C{None} when the ACL does not
        apply to this uploader, C{False} (with a reason) when it forbids
        the upload, and C{True} when it allows it.
        """
        source_name = upload.changes.source_name

        # ACLs can be restricted to specific fingerprints or keyrings;
        # if the uploader does not match, the ACL simply does not apply.
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                # Only explicitly allowed architectures may be uploaded.
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        # Per-source entries can either whitelist (allow_per_source) or
        # blacklist (deny_per_source) individual source packages.
        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring
        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        # A fingerprint-specific ACL overrides the keyring's default ACL.
        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if result == False:
            raise RejectACL(acl, reason)

        # Global ACLs apply to every upload in addition to the main ACL.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        acls = suite.acls
        if len(acls) != 0:
            # With per-suite ACLs, at least one of them must accept the
            # upload; any explicit denial rejects immediately.
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result == False:
                    raise RejectACL(acl, reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
class TransitionCheck(Check):
    """check for a transition

    Sourceful uploads of packages that take part in an ongoing testing
    transition are rejected until the transition is over.
    """
    def check(self, upload):
        # Only sourceful uploads can be part of a transition.
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """Load the transitions file.

        @return: parsed transitions mapping, or C{None} when no file is
                 configured, it does not exist, or it fails to parse
        """
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        # Use a context manager so the file handle is closed (file(...)
        # .read() leaked it), and safe_load: the transitions file is plain
        # YAML and yaml.load() can construct arbitrary Python objects.
        with open(path, 'r') as fd:
            contents = fd.read()
        try:
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.
    """
    def check(self, upload):
        # Nothing to do when source-only uploads are globally permitted.
        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
            return True
        changes = upload.changes
        is_source_only = changes.source is not None and len(changes.binaries) == 0
        if is_source_only:
            raise Reject('Source-only uploads are not allowed.')
        return True
class LintianCheck(Check):
    """Check package using lintian

    Runs lintian with the configured tag list and rejects the upload when
    any of those tags fire.
    """
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            # safe_load: the tag file is plain YAML; yaml.load() would
            # allow arbitrary object construction.
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # Write the flattened tag list to a world-readable temporary file
        # so a (possibly unprivileged) lintian process can read it.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            for tag in tags:
                temptagfile.write('%s\n' % tag)
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)

        cmd = []
        result = 0

        user = cnf.get('Dinstall::UnprivUser') or None
        if user is not None:
            cmd.extend(['sudo', '-H', '-u', user])

        cmd.extend(['LINTIAN_COLL_UNPACKED_SKIP_SIG=1', '/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
        # TODO: replace by subprocess — joining the argv with spaces and
        # running it through a shell breaks (and is unsafe) for filenames
        # containing shell metacharacters.
        result, output = commands.getstatusoutput(" ".join(cmd))

        os.unlink(temp_filename)

        if result != 0:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        # Binary-only uploads have no source format to validate.
        if source is None:
            return True

        source_format = source.dsc['Format']
        allowed = session.query(SrcFormat) \
            .filter_by(format_name=source_format) \
            .filter(SrcFormat.suites.contains(suite)) \
            .first()
        if allowed is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    """Check every uploaded architecture exists in the target suite."""
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            known = session.query(Architecture) \
                .filter_by(arch_string=arch) \
                .filter(Architecture.suites.contains(suite)) \
                .first()
            if known is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        # Highest version of the source package in `suite`, or None if it
        # is not present there.
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        else:
            return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        # Highest version of the binary package in `suite` for the given
        # architecture (arch:all counts for every architecture).
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        else:
            return db_binary.version

    def _version_checks(self, upload, suite, other_suite, op, op_name):
        # Compare every uploaded source and binary package against the
        # highest version already in `other_suite`; `op` receives the
        # result of version_compare(uploaded, existing) and must return
        # True for the upload to be acceptable. `op_name` ("higher"/
        # "lower") is only used in the reject message.
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        # Suites the upload must be strictly newer than: every suite with
        # a MustBeNewerThan/Enhances version check, plus `suite` itself.
        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]

        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')

        return True