1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Module providing pre-acceptance tests
23 Please read the documentation for the L{Check} class for the interface.
26 from daklib.config import Config
27 import daklib.daksubprocess
28 from daklib.dbconn import *
29 import daklib.dbconn as dbconn
30 from daklib.regexes import *
31 from daklib.textutils import fix_maintainer, ParseMaintError
32 import daklib.lintian as lintian
33 import daklib.utils as utils
34 from daklib.upload import InvalidHashException
38 from apt_pkg import version_compare
def check_fields_for_valid_utf8(filename, control):
    """Check all fields of a control file for valid UTF-8

    @type  filename: str
    @param filename: name of the control file (only used in error messages)

    @param control: mapping of field names to (byte-string) values

    @raise Reject: if any field value fails to decode as UTF-8
    """
    for field in control.keys():
        # Restored the try: frame -- the except below was orphaned.
        try:
            control[field].decode('utf-8')
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))
class Reject(Exception):
    """Exception raised when a check decides the upload must be rejected."""
class RejectStupidMaintainerException(Exception):
    """exception raised by failing the external hashes check

    Constructed with four positional args: filename, the name of the
    mismatching attribute, the uploaded (current) value and the value
    recorded in the external files db.
    """
    def __str__(self):
        # Restored the __str__ header -- the return below was orphaned.
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]
class RejectACL(Reject):
    """exception raised by failing ACL checks

    Carries the ACL that failed and a human-readable reason.
    """
    def __init__(self, acl, reason):
        # Restored the attribute assignments -- __init__ had no body and
        # __str__ below reads self.acl/self.reason.
        self.acl = acl
        self.reason = reason

    def __str__(self):
        # Restored the __str__ header -- the return below was orphaned.
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
74 """base class for checks
76 checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
77 raise a L{daklib.checks.Reject} exception including a human-readable
78 description why the upload should be rejected.
80 def check(self, upload):
83 @type upload: L{daklib.archive.ArchiveUpload}
84 @param upload: upload to check
86 @raise daklib.checks.Reject: upload should be rejected
89 def per_suite_check(self, upload, suite):
90 """do per-suite checks
92 @type upload: L{daklib.archive.ArchiveUpload}
93 @param upload: upload to check
95 @type suite: L{daklib.dbconn.Suite}
96 @param suite: suite to check
98 @raise daklib.checks.Reject: upload should be rejected
103 """allow to force ignore failing test
105 C{True} if it is acceptable to force ignoring a failing test,
class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)
    Make sure the signature is valid and done by a known user.
    def check(self, upload):
        # NOTE(review): this copy appears to have lost structural lines
        # (docstring terminators, a "try:" before "source = changes.source",
        # and the "try:"/except frame in _check_hashes) -- reconcile with
        # version control before editing.
        changes = upload.changes
        # A .changes without a valid signature is rejected outright.
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        # Verify checksums of every file listed in the .changes.
        self._check_hashes(upload, changes.filename, changes.files.itervalues())
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        # Sourceful uploads: the .dsc must be validly signed by the same
        # key as the .changes, and its file checksums must match as well.
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())
        # The signing key must map to a known uid.
        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")
    """Make sure hashes match existing files
    @type  upload: L{daklib.archive.ArchiveUpload}
    @param upload: upload we are processing
    @param filename: name of the file the expected hash values are taken from
    @type  files: sequence of L{daklib.upload.HashedFile}
    @param files: files to check the hashes for
    def _check_hashes(self, upload, filename, files):
                f.check(upload.directory)
            if e.errno == errno.ENOENT:
                # A file listed in the .changes is missing from the upload.
                raise Reject('{0} refers to non-existing file: {1}\n'
                             'Perhaps you need to include it in your upload?'
                             .format(filename, os.path.basename(e.filename)))
        except InvalidHashException as e:
            raise Reject('{0}: {1}'.format(filename, unicode(e)))
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    # NOTE(review): several guard/try lines ("if not source_match:",
    # "if not match:", the "try:" frames around fix_maintainer) appear to
    # be elided in this copy -- reconcile with version control.
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename
        # Every mandatory .changes field must be present.
        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))
        check_fields_for_valid_utf8(fn, control)
        # Source and Version fields must be syntactically valid.
        source_match = re_field_source.match(control['Source'])
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')
        # The .changes filename must agree with the Source/Version fields.
        match = re_file_changes.match(fn)
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))
        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))
        # The Architecture list must be consistent with the upload contents.
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")
        # Maintainer and Changed-By must be parseable addresses.
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))
        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")
        # Bug numbers in Closes must be numeric.
        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        # NOTE(review): the filename is interpolated into the SQL string;
        # quotes or LIKE metacharacters in a filename would break the query.
        # Consider a bound parameter -- confirm filenames are pre-validated
        # upstream.
        q = session.execute("SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE '%%/%s'" % f.filename)
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)
        # Any attribute mismatch against the external db is fatal.
        if ext_size != f.size:
            raise RejectStupidMaintainerException(f.filename, 'size', f.size, ext_size)
        if ext_md5sum != f.md5sum:
            raise RejectStupidMaintainerException(f.filename, 'md5sum', f.md5sum, ext_md5sum)
        if ext_sha1sum != f.sha1sum:
            raise RejectStupidMaintainerException(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)
        if ext_sha256sum != f.sha256sum:
            raise RejectStupidMaintainerException(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)
    def check(self, upload):
        # NOTE(review): the assignment of "cnf" (presumably Config()) and
        # the early-return body of this guard are not visible in this copy.
        if not cnf.use_extfiles:
        session = upload.session
        changes = upload.changes
        # Check every file from the .changes, and the source files if any.
        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        # Validate each binary package individually first.
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)
        # Every uploaded binary must be listed in the Binary field.
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))
    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control
        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # NOTE(review): "{0}" is used twice so the filename is printed
                # where the missing field name was intended; the second
                # placeholder should presumably be "{1}".
                raise Reject('{0}: Missing mandatory field {0}.'.format(fn, field))
        check_fields_for_valid_utf8(fn, control)
        # Field syntax checks.
        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))
        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')
        # Architecture must appear in the .changes and cannot be "source".
        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))
        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))
        # The .deb filename must match the package metadata.
        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))
        # check dependency field syntax
        # NOTE(review): the "try:"/"except:" frames around the apt_pkg parse
        # calls below appear to be elided in this copy.
        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                    apt_pkg.parse_depends(value)
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
        # Built-Using uses source-dependency syntax.
        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                    apt_pkg.parse_src_depends(value)
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages
    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    def check(self, upload):
        # NOTE(review): the "cnf" assignment, TarTime.__init__ header, the
        # "tar = TarTime()" instantiation, format_reason's return and the
        # "if tar.future_files:" / "if tar.past_files:" guards appear to
        # be elided in this copy -- reconcile with version control.
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))
        # Collects names/mtimes of tar members outside the allowed window.
        class TarTime(object):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime
        # Build a human-readable rejection message from the offending files.
        def format_reason(filename, direction, files):
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += " {0} ({1})".format(fn, time.ctime(ts))
        # Walk each binary's control tarball and reject on bad timestamps.
        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            deb.control.go(tar.callback)
                raise Reject(format_reason(filename, 'future', tar.future_files))
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        # NOTE(review): the "is_orig" bookkeeping and "if match is None:"
        # guards around the two match calls appear to be elided in this copy.
        match = re_file_orig.match(filename)
            match = regex.match(filename)
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))
        version = control['Version']
        # For .orig tarballs the filename carries the upstream version only.
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                # NOTE(review): "{0}" is reused, so the version passed as the
                # second format argument is never printed.
                raise Reject('{0}: Source package includes upstream tarball, but {0} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))
    def check(self, upload):
        # Source-less uploads have nothing to check here.
        if upload.changes.source is None:
        changes = upload.changes.changes
        source = upload.changes.source
        dsc_fn = source._dsc_file.filename
        # NOTE(review): "control" is used below but its assignment
        # (presumably from source.dsc) is not visible in this copy.
        check_fields_for_valid_utf8(dsc_fn, control)
        # Source/Version must be valid and agree with the .changes file.
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))
        # The .dsc and every referenced file must have conforming names.
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)
        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))
        # Delegate file-list sanity checks (orig handling etc.) to utils.
        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
            raise Reject("\n".join(rejects))
class SingleDistributionCheck(Check):
    """Ensure the .changes file targets exactly one distribution."""
    def check(self, upload):
        distributions = upload.changes.distributions
        if len(distributions) == 1:
            return
        raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""
    # NOTE(review): several guard/return lines in _check_acl, check and
    # per_suite_check (e.g. the early returns after the match_fingerprint /
    # match_keyring tests, "if keyring is None:", "if result == False:")
    # appear to be elided in this copy -- reconcile with version control.
    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                # Same binary name built from a different source package in
                # this suite counts as a hijack.
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None
    def _check_acl(self, session, upload, acl):
        source_name = upload.changes.source_name
        # ACLs may be restricted to a fingerprint or a keyring.
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
        if not acl.allow_new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            # Restrict to the architectures granted by the ACL if arch:all
            # style blanket permission is not set.
            if not acl.allow_binary_all:
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)
        # Per-source ACL entries can both grant and deny upload permission.
        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)
    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring
        # The signing key must come from an active keyring with an ACL.
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))
        acl = fingerprint.acl or keyring.acl
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
            raise RejectACL(acl, reason)
        # Global ACLs apply to every upload.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
                raise RejectACL(acl, reason)
    def per_suite_check(self, upload, suite):
            result, reason = self._check_acl(upload.session, upload, acl)
            accept = accept or result
            raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        # Only sourceful uploads can be blocked by a transition.
        if 'source' not in upload.changes.architectures:
        transitions = self.get_transitions()
        if transitions is None:
        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')
        # NOTE(review): the assignments feeding "expected" and "session"
        # below (taken per transition entry / the upload) are not visible
        # in this copy -- reconcile with version control.
        for trans in transitions:
            t = transitions[trans]
            # Will be None if nothing is in testing.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)
            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet
                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.
                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)
                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                        currentlymsg = "not present in testing"
                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])
                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(source, currentlymsg, expected,t["rm"])))
                    raise Reject(rejectmsg)
    def get_transitions(self):
        # Transitions are configured in a YAML file; absence of the file
        # (or a parse error) disables this check.
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
        # NOTE(review): py2-only file() builtin here, and the handle is
        # never explicitly closed.
        contents = file(path, 'r').read()
            transitions = yaml.safe_load(contents)
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))
class NoSourceOnlyCheck(Check):
    """Check for source-only upload
    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.
    def check(self, upload):
        # If the config allows source-only uploads there is nothing to check
        # (the body of this guard is not visible in this copy).
        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
        changes = upload.changes
        # An upload carrying a source package but no binaries is source-only.
        if changes.source is not None and len(changes.binaries) == 0:
            raise Reject('Source-only uploads are not allowed.')
class LintianCheck(Check):
    """Check package using lintian"""
    # NOTE(review): "cnf = Config()", several early-return bodies, the
    # "for tag in tags:" line, and the "try:"/"finally:" frames around the
    # YAML parse and the lintian invocation appear to be elided in this
    # copy -- reconcile with version control before editing.
    def check(self, upload):
        changes = upload.changes
        # Only check sourceful uploads.
        if changes.source is None:
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
        if 'Dinstall::LintianTags' not in cnf:
        tagfile = cnf['Dinstall::LintianTags']
        # Load the tags we reject on from the configured YAML file.
        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))
        # Write the tags to a temporary file handed to lintian.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
                print >>temptagfile, tag
        changespath = os.path.join(upload.directory, changes.filename)
        # Run lintian as an unprivileged user when one is configured.
        user = cnf.get('Dinstall::UnprivUser') or None
            cmd.extend(['sudo', '-H', '-u', user])
        cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
        output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            result = e.returncode
        os.unlink(temp_filename)
        # A lintian failure (as opposed to findings) is only warned about.
        utils.warn("lintian failed for %s [return code: %s]." % \
            (changespath, result))
        utils.warn(utils.prefix_multi_line_string(output, \
            " [possible output:] "))
        # Reject if any of the configured tags were emitted.
        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        # NOTE(review): the guard skipping binary-only uploads (source is
        # None) appears to be elided in this copy.
        # The format declared in the .dsc must be enabled for this suite.
        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    def per_suite_check(self, upload, suite):
        """Reject the upload if it carries an architecture the suite lacks."""
        session = upload.session
        for architecture in upload.changes.architectures:
            found = session.query(Architecture) \
                .filter_by(arch_string=architecture) \
                .filter(Architecture.suites.contains(suite)) \
                .first()
            if found is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(architecture, suite.suite_name))
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        # Highest version of source_name present in `suite`.
        # NOTE(review): the "return None" branch of the guard below is not
        # visible in this copy -- reconcile with version control.
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return db_source.version
    def _highest_binary_version(self, session, binary_name, suite, architecture):
        # Highest version of binary_name in `suite` for the given
        # architecture or arch:all; None branch elided as above.
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return db_binary.version
    def _version_checks(self, upload, suite, other_suite, op, op_name):
        session = upload.session
        # Compare the uploaded source version against the highest version
        # in other_suite using the supplied comparison predicate `op`.
        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))
        # Apply the same comparison to every binary package in the upload.
        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))
    def per_suite_check(self, upload, suite):
        session = upload.session
        # Suites referenced by MustBeNewerThan/Enhances require a strictly
        # newer version than they currently carry.
        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)
        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')
        # Suites referenced by MustBeOlderThan require a strictly older one.
        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]
        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')