1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21 """module provided pre-acceptance tests
23 Please read the documentation for the L{Check} class for the interface.
26 from daklib.config import Config
27 from daklib.dbconn import *
28 import daklib.dbconn as dbconn
29 from daklib.regexes import *
30 from daklib.textutils import fix_maintainer, ParseMaintError
31 import daklib.lintian as lintian
32 import daklib.utils as utils
33 from daklib.upload import InvalidHashException
37 from apt_pkg import version_compare
43 # TODO: replace by subprocess
class Reject(Exception):
    """Raised by a failing check; the message explains why the upload is rejected."""
class RejectStupidMaintainerException(Exception):
    """exception raised by failing the external hashes check"""
        # __str__ body: args are expected to be, in order,
        # (filename, hash-field-name, uploaded-value, external-db-value).
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]
class RejectACL(Reject):
    """exception raised by failing ACL checks"""
    def __init__(self, acl, reason):
        # acl: the ACL object that denied the upload (must expose .name)
        # reason: human-readable explanation of the denial
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
66 """base class for checks
68 checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
69 raise a L{daklib.checks.Reject} exception including a human-readable
70 description why the upload should be rejected.
72 def check(self, upload):
75 @type upload: L{daklib.archive.ArchiveUpload}
76 @param upload: upload to check
78 @raise daklib.checks.Reject: upload should be rejected
81 def per_suite_check(self, upload, suite):
82 """do per-suite checks
84 @type upload: L{daklib.archive.ArchiveUpload}
85 @param upload: upload to check
87 @type suite: L{daklib.dbconn.Suite}
88 @param suite: suite to check
90 @raise daklib.checks.Reject: upload should be rejected
95 """allow to force ignore failing test
97 C{True} if it is acceptable to force ignoring a failing test,
class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    def check(self, upload):
        changes = upload.changes
        # The .changes must carry a valid signature before anything else is trusted.
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self._check_hashes(upload, changes.filename, changes.files.itervalues())
        # Accessing changes.source parses the .dsc; a parse failure rejects the upload.
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            # Source uploads additionally need a valid .dsc signature made by
            # the same key that signed the .changes.
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())
        # A fingerprint without an associated uid means the key is unknown to us.
        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")
    """Make sure hashes match existing files

    @type upload: L{daklib.archive.ArchiveUpload}
    @param upload: upload we are processing
    @param filename: name of the file the expected hash values are taken from
    @type files: sequence of L{daklib.upload.HashedFile}
    @param files: files to check the hashes for
    def _check_hashes(self, upload, filename, files):
        # f.check() verifies size and checksums against the file on disk.
            f.check(upload.directory)
            if e.errno == errno.ENOENT:
                # Referenced file is simply missing from the upload directory.
                raise Reject('{0} refers to non-existing file: {1}\n'
                             'Perhaps you need to include it in your upload?'
                             .format(filename, os.path.basename(e.filename)))
        except InvalidHashException as e:
            raise Reject('{0}: {1}'.format(filename, unicode(e)))
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename
        # All of these fields are mandatory in a .changes file.
        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))
        source_match = re_field_source.match(control['Source'])
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')
        # The .changes filename must agree with the Source and Version fields
        # (the epoch never appears in filenames).
        match = re_file_changes.match(fn)
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))
        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))
        # The Architecture field and the actual upload contents must agree
        # about whether source is included.
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))
            # Changed-By is optional; only validate it when present.
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))
        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")
        # Closes must contain only bug numbers.
        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        # NOTE(review): f.filename is interpolated directly into the SQL
        # string.  A parameterized query would be safer unless filenames are
        # guaranteed sanitized upstream -- confirm.
        q = session.execute("SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE '%%/%s'" % f.filename)
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)
        # Any mismatch with the external record is a hard reject.
        if ext_size != f.size:
            raise RejectStupidMaintainerException(f.filename, 'size', f.size, ext_size)
        if ext_md5sum != f.md5sum:
            raise RejectStupidMaintainerException(f.filename, 'md5sum', f.md5sum, ext_md5sum)
        if ext_sha1sum != f.sha1sum:
            raise RejectStupidMaintainerException(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)
        if ext_sha256sum != f.sha256sum:
            raise RejectStupidMaintainerException(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)
    def check(self, upload):
        # Whole check is a no-op unless the external files db is enabled.
        if not cnf.use_extfiles:
        session = upload.session
        changes = upload.changes
        # Check files listed in .changes, then files listed in the .dsc.
        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)
        # Every built package must also appear in the changes' Binary field.
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))
    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control
        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # NOTE(review): the format string uses {0} twice, so the
                # message repeats the filename where the missing field name
                # (`field`, passed but unused) was meant; the second
                # placeholder should probably be {1}.
                raise Reject('{0}: Missing mandatory field {0}.'.format(fn, field))
        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))
        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')
        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))
        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))
        # Filename must agree with Package/Version/Architecture.
        # NOTE(review): `match` is used without a None check; a filename not
        # matching re_file_binary would raise AttributeError instead of a
        # clean Reject -- confirm an earlier check guarantees the match.
        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))
        # check dependency field syntax
        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                    apt_pkg.parse_depends(value)
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
        # Built-Using takes source-dependency syntax, hence parse_src_depends.
        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                    apt_pkg.parse_src_depends(value)
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    def check(self, upload):
        # Cutoffs are configurable: grace period into the future, and a
        # fixed year in the past (default 1984).
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1984'), '%Y'))
        class TarTime(object):
            # Collects offending members while the tarball is walked.
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    # NOTE(review): `future_files` / `past_files` below are
                    # bare names, but they are initialised as *instance*
                    # attributes above -- as written this raises NameError;
                    # these look like they should be self.future_files /
                    # self.past_files.  Confirm.
                    future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    past_files[member.name] = member.mtime
        def format_reason(filename, direction, files):
            # Human-readable reject message listing each offending file.
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += " {0} ({1})".format(fn, time.ctime(ts))
        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            deb.control.go(tar.callback)
                raise Reject(format_reason(filename, 'future', tar.future_files))
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        # In case we have an .orig.tar.*, we have to strip the Debian revision
        # from the version number. So handle this special case first.
        match = re_file_orig.match(filename)
            match = regex.match(filename)
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))
        version = control['Version']
            # Orig tarballs carry only the upstream part of the version.
            version = re_field_version_upstream.match(version).group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))
    def check(self, upload):
        # Nothing to do for binary-only uploads.
        if upload.changes.source is None:
        changes = upload.changes.changes
        source = upload.changes.source
        dsc_fn = source._dsc_file.filename
        # The .dsc's Source/Version must be well-formed and agree with .changes.
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))
        # Validate the .dsc filename itself and every file it references.
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)
        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))
        # Delegate the detailed per-file checks to daklib.utils.
        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
            raise Reject("\n".join(rejects))
class SingleDistributionCheck(Check):
    """Reject any upload whose .changes names more (or fewer) than one distribution."""
    def check(self, upload):
        # Exactly one target distribution is required.
        distributions = upload.changes.distributions
        if len(distributions) == 1:
            return
        raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""
    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                # Same binary name built from a different source package
                # already in this suite -> hijack.
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary, binary.source.source
        return False, None, None
    def _check_acl(self, session, upload, acl):
        source_name = upload.changes.source_name
        # Skip this ACL if its fingerprint/keyring restrictions do not match
        # the uploader.
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
        if not acl.allow_new:
                return False, "NEW uploads are not allowed"
            # A section of 'byhand' or 'raw-*' marks a BYHAND file.
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                # Only the ACL's explicitly listed architectures are allowed.
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)
        # Per-source entries can either be required (allow_per_source) or
        # act as a deny-list (deny_per_source).
        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)
    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring
        # Uploads must come from an active, known keyring with an ACL attached.
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))
        acl = fingerprint.acl or keyring.acl
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
            raise RejectACL(acl, reason)
        # Global ACLs apply to every upload on top of the key's own ACL.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
                raise RejectACL(acl, reason)
    def per_suite_check(self, upload, suite):
            result, reason = self._check_acl(upload.session, upload, acl)
            # One accepting per-suite ACL is sufficient.
            accept = accept or result
            raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        # Only sourceful uploads can be blocked by a transition.
        if 'source' not in upload.changes.architectures:
        transitions = self.get_transitions()
        if transitions is None:
        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')
        for trans in transitions:
            t = transitions[trans]
            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)
            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet
                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.
                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)
                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                        currentlymsg = "not present in testing"
                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])
                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(source, currentlymsg, expected,t["rm"])))
                    raise Reject(rejectmsg)
    def get_transitions(self):
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        # No transitions file configured (or it vanished): nothing to check.
        if path == '' or not os.path.exists(path):
        contents = file(path, 'r').read()
            # NOTE(review): yaml.load without an explicit Loader accepts the
            # full YAML language; yaml.safe_load would be safer if the
            # transitions file is ever writable by others -- confirm.
            transitions = yaml.load(contents)
        except yaml.YAMLError as msg:
            # A broken transitions file must not block uploads; warn and move on.
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.
    def check(self, upload):
        # When the option is set, source-only uploads are always fine.
        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
        changes = upload.changes
        # Source present but no binaries at all -> source-only upload.
        if changes.source is not None and len(changes.binaries) == 0:
            raise Reject('Source-only uploads are not allowed.')
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes
        # Only check sourceful uploads.
        if changes.source is None:
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
        # Without a configured tag list there is nothing to check against.
        if 'Dinstall::LintianTags' not in cnf:
        tagfile = cnf['Dinstall::LintianTags']
        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
            # NOTE(review): yaml.load without a Loader -- yaml.safe_load
            # would be safer if the tag file is not fully trusted.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))
        # Write all configured tags into a temp file handed to lintian.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
                print >>temptagfile, tag
        changespath = os.path.join(upload.directory, changes.filename)
            # Run lintian under the unprivileged group via sudo when configured.
            cmd = "sudo -H -u {0} -- /usr/bin/lintian --show-overrides --tags-from-file {1} {2}".format(cnf.unprivgroup, temp_filename, changespath)
            cmd = "/usr/bin/lintian --show-overrides --tags-from-file {0} {1}".format(temp_filename, changespath)
        result, output = commands.getstatusoutput(cmd)
        os.unlink(temp_filename)
            # lintian itself failed; warn but keep its output for diagnosis.
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))
        # Translate lintian's output into reject messages for configured tags.
        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        # Look up the .dsc's Format among the suite's allowed source formats.
        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    """Ensure every architecture in the upload is enabled in the target suite."""
    def per_suite_check(self, upload, suite):
        db = upload.session
        for architecture in upload.changes.architectures:
            # No matching Architecture row linked to this suite -> reject.
            hit = db.query(Architecture) \
                .filter_by(arch_string=architecture) \
                .filter(Architecture.suites.contains(suite)) \
                .first()
            if hit is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(architecture, suite.suite_name))
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        # Newest version of source_name currently in suite.
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
        return db_source.version
    def _highest_binary_version(self, session, binary_name, suite, architecture):
        # Same for binaries; arch 'all' packages count for every architecture.
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
        return db_binary.version
    def _version_checks(self, upload, suite, op):
        # Reject when op(version_compare(uploaded, existing)) is false for
        # any source or binary package in the upload.
        session = upload.session
        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject('Version check failed (source={0}, version={1}, other-version={2}, suite={3})'.format(source_name, source_version, v, suite.suite_name))
        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject('Version check failed (binary={0}, version={1}, other-version={2}, suite={3})'.format(binary_name, binary_version, v, suite.suite_name))
    def per_suite_check(self, upload, suite):
        session = upload.session
        # Suites relative to which the upload must be strictly newer.
        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)
        for s in must_be_newer_than:
            self._version_checks(upload, s, lambda result: result > 0)
        # Suites relative to which the upload must be strictly older.
        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]
        for s in must_be_older_than:
            self._version_checks(upload, s, lambda result: result < 0)