1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21 """module provided pre-acceptance tests
23 Please read the documentation for the L{Check} class for the interface.
26 from daklib.config import Config
27 from daklib.dbconn import *
28 import daklib.dbconn as dbconn
29 from daklib.regexes import *
30 from daklib.textutils import fix_maintainer, ParseMaintError
31 import daklib.lintian as lintian
32 import daklib.utils as utils
33 from daklib.upload import InvalidHashException
37 from apt_pkg import version_compare
43 # TODO: replace by subprocess
def check_fields_for_valid_utf8(filename, control):
    """Check all fields of a control file for valid UTF-8

    @type  filename: str
    @param filename: name of the control file, used in the reject message

    @type  control: dict-like
    @param control: parsed control stanza whose raw (bytes) values are checked

    @raise Reject: if any field value is not valid UTF-8
    """
    for field in control.keys():
        try:
            control[field].decode('utf-8')
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))
class Reject(Exception):
    """Exception raised by a failing check; the message describes why the upload is rejected."""
class RejectStupidMaintainerException(Exception):
    """exception raised by failing the external hashes check

    Constructed with four positional arguments:
    (filename, hash name, uploaded value, external value).
    """
    def __str__(self):
        # self.args[:4] = (filename, hash name, current value, external value)
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]
class RejectACL(Reject):
    """exception raised by failing ACL checks"""
    def __init__(self, acl, reason):
        self.acl = acl        # the ACL that denied the upload
        self.reason = reason  # human-readable explanation of the denial

    def __str__(self):
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
75 """base class for checks
77 checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
78 raise a L{daklib.checks.Reject} exception including a human-readable
79 description why the upload should be rejected.
81 def check(self, upload):
84 @type upload: L{daklib.archive.ArchiveUpload}
85 @param upload: upload to check
87 @raise daklib.checks.Reject: upload should be rejected
90 def per_suite_check(self, upload, suite):
91 """do per-suite checks
93 @type upload: L{daklib.archive.ArchiveUpload}
94 @param upload: upload to check
96 @type suite: L{daklib.dbconn.Suite}
97 @param suite: suite to check
99 @raise daklib.checks.Reject: upload should be rejected
104 """allow to force ignore failing test
106 C{True} if it is acceptable to force ignoring a failing test,
class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check(self, upload):
        changes = upload.changes
        # The .changes itself must carry a valid signature.
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self._check_hashes(upload, changes.filename, changes.files.itervalues())

        source = None
        try:
            # Accessing .source parses the .dsc; a broken .dsc raises here.
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            # .changes and .dsc must be signed by the same key.
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())

        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

    def _check_hashes(self, upload, filename, files):
        """Make sure hashes match existing files

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload we are processing

        @type  filename: str
        @param filename: name of the file the expected hash values are taken from

        @type  files: sequence of L{daklib.upload.HashedFile}
        @param files: files to check the hashes for
        """
        for f in files:
            try:
                f.check(upload.directory)
            except IOError as e:
                if e.errno == errno.ENOENT:
                    raise Reject('{0} refers to non-existing file: {1}\n'
                                 'Perhaps you need to include it in your upload?'
                                 .format(filename, os.path.basename(e.filename)))
                # Unexpected I/O errors are not swallowed.
                raise
            except InvalidHashException as e:
                raise Reject('{0}: {1}'.format(filename, unicode(e)))
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        # The filename must agree with the Source and Version fields.
        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # Architecture list and presence of a source package must agree.
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        # Use a bound parameter: the filename comes straight from the upload
        # and must not be interpolated into the SQL string (injection risk).
        q = session.execute(
            "SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern",
            {'pattern': '%/' + f.filename})
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        # File unknown to the external db: nothing to compare against.
        if not ext_size:
            return

        if ext_size != f.size:
            raise RejectStupidMaintainerException(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectStupidMaintainerException(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectStupidMaintainerException(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectStupidMaintainerException(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        cnf = Config()

        # The external files db is optional; skip the check when disabled.
        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # Every uploaded binary must be listed in the changes' Binary field.
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # fixed: field name placeholder was {0} (filename) instead of {1}
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if not match:
            # Guard against None: an unparsable filename must reject, not crash.
            raise Reject('{0}: does not match regular expression for binary filenames'.format(fn))
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        class TarTime(object):
            """collects member names whose mtime lies outside the allowed window"""
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += " {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            # Walk the control tarball; callback records out-of-range mtimes.
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            # .orig tarballs carry only the upstream part of the version.
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                # fixed: the version (not the filename) was meant here ({1}, not {0})
                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        if upload.changes.source is None:
            # Nothing to do for binary-only uploads.
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        # Multi-distribution uploads are rejected outright.
        distributions = upload.changes.distributions
        if len(distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """check upload against a single ACL

        @rtype: (C{bool} or C{None}, str or C{None})
        @return: (result, reason); result is C{True} when the ACL accepts,
                 C{False} when it rejects, and C{None} when the ACL does not
                 apply to this upload at all.
        """
        source_name = upload.changes.source_name

        # ACL does not apply to this upload: return None (not False).
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        # Compare to False explicitly: None means "ACL not applicable".
        if result == False:
            raise RejectACL(acl, reason)

        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        acls = suite.acls
        if len(acls) != 0:
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result == False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        # Transitions only ever block sourceful uploads.
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            expected = t["new"]

            # Will be None if nothing is in testing.
            # NOTE(review): this looks up the *uploaded* source in testing;
            # possibly t["source"] (the transition's tracked package) is
            # intended here -- confirm against the transitions file format.
            current = get_source_in_suite(source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """Return the parsed transitions YAML, or C{None} if unset, missing or broken."""
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        # Context manager closes the handle; file(path).read() leaked it.
        with open(path, 'r') as fd:
            contents = fd.read()
        try:
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.
    """
    def check(self, upload):
        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
            return True
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            raise Reject('Source-only uploads are not allowed.')
        return True
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # Write the tags we care about to a temporary file for lintian.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            for tag in tags:
                print >>temptagfile, tag
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)

        cmd = []
        result = 0

        user = cnf.get('Dinstall::UnprivUser') or None
        if user is not None:
            cmd.extend(['sudo', '-H', '-u', user])

        # HACK: the command is joined into a shell string below; changespath
        # contains an upload-supplied filename. See the file-level TODO about
        # moving to subprocess with an argument list.
        cmd.extend(['LINTIAN_COLL_UNPACKED_SKIP_SIG=1', '/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
        result, output = commands.getstatusoutput(" ".join(cmd))

        os.unlink(temp_filename)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        if source is None:
            # Binary-only upload: no source format to validate.
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    def per_suite_check(self, upload, suite):
        """Reject any uploaded architecture the target suite does not carry."""
        session = upload.session
        for arch in upload.changes.architectures:
            matches = session.query(Architecture) \
                .filter_by(arch_string=arch) \
                .filter(Architecture.suites.contains(suite))
            if matches.first() is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        """Return the highest version of C{source_name} in C{suite}, or C{None}."""
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        """Return the highest version of C{binary_name} in C{suite} for
        C{architecture} (or arch:all), or C{None}."""
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        return db_binary.version

    def _version_checks(self, upload, suite, other_suite, op, op_name):
        """reject unless C{op(version_compare(uploaded, existing))} holds
        for every uploaded source and binary against C{other_suite}"""
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]

        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')

        return True