1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21 """module provided pre-acceptance tests
23 Please read the documentation for the L{Check} class for the interface.
import errno
import os
import subprocess
import textwrap
import time

import apt_inst
import apt_pkg
from apt_pkg import version_compare
import yaml

from daklib.config import Config
import daklib.daksubprocess
from daklib.dbconn import *
import daklib.dbconn as dbconn
from daklib.regexes import *
from daklib.textutils import fix_maintainer, ParseMaintError
import daklib.lintian as lintian
import daklib.utils as utils
from daklib.upload import InvalidHashException
def check_fields_for_valid_utf8(filename, control):
    """Check all fields of a control file for valid UTF-8

    @type  filename: str
    @param filename: name of the control file (only used in error messages)

    @type  control: mapping of field name to raw field value
    @param control: parsed control file to validate

    @raise Reject: some field value is not valid UTF-8
    """
    for field in control.keys():
        try:
            control[field].decode('utf-8')
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))
class Reject(Exception):
    """exception raised by failing checks; the message is shown to the uploader"""
class RejectStupidMaintainerException(Exception):
    """exception raised by failing the external hashes check

    Constructed with four positional arguments:
    (filename, hash name, current value, external value).
    """
    def __str__(self):
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]
class RejectACL(Reject):
    """exception raised by failing ACL checks"""
    def __init__(self, acl, reason):
        # keep the failing ACL around so callers can report which ACL
        # triggered the rejection
        self.acl = acl
        self.reason = reason

    def __str__(self):
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
75 """base class for checks
77 checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
78 raise a L{daklib.checks.Reject} exception including a human-readable
79 description why the upload should be rejected.
81 def check(self, upload):
84 @type upload: L{daklib.archive.ArchiveUpload}
85 @param upload: upload to check
87 @raise daklib.checks.Reject: upload should be rejected
90 def per_suite_check(self, upload, suite):
91 """do per-suite checks
93 @type upload: L{daklib.archive.ArchiveUpload}
94 @param upload: upload to check
96 @type suite: L{daklib.dbconn.Suite}
97 @param suite: suite to check
99 @raise daklib.checks.Reject: upload should be rejected
104 """allow to force ignore failing test
106 C{True} if it is acceptable to force ignoring a failing test,
class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check(self, upload):
        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self._check_hashes(upload, changes.filename, changes.files.itervalues())

        source = None
        try:
            # accessing changes.source parses the .dsc; parse errors
            # surface here and must lead to a rejection, not a crash
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())

        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

    def _check_hashes(self, upload, filename, files):
        """Make sure hashes match existing files

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload we are processing

        @type  filename: str
        @param filename: name of the file the expected hash values are taken from

        @type  files: sequence of L{daklib.upload.HashedFile}
        @param files: files to check the hashes for

        @raise Reject: a file is missing or has a wrong hash
        """
        for f in files:
            try:
                f.check(upload.directory)
            except IOError as e:
                if e.errno == errno.ENOENT:
                    raise Reject('{0} refers to non-existing file: {1}\n'
                                 'Perhaps you need to include it in your upload?'
                                 .format(filename, os.path.basename(e.filename)))
                # any other I/O error is unexpected; propagate it
                raise
            except InvalidHashException as e:
                raise Reject('{0}: {1}'.format(filename, unicode(e)))
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        # the filename must agree with the Source and Version fields
        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # architecture list and presence of a source package must agree
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            # Changed-By is optional; only validate it when present
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        """Compare one file's size and hashes with the external_files table.

        @raise RejectStupidMaintainerException: a recorded value differs
        """
        # NOTE(review): the filename is interpolated into the SQL string;
        # f.filename comes from a parsed upload, but parameterized queries
        # would be safer here.
        q = session.execute("SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE '%%/%s'" % f.filename)
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        # file not known externally: nothing to compare against
        if not ext_size:
            return

        if ext_size != f.size:
            raise RejectStupidMaintainerException(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectStupidMaintainerException(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectStupidMaintainerException(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectStupidMaintainerException(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        cnf = Config()

        # the whole check is optional and disabled by default
        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # every uploaded binary must also be listed in the Binary field
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

    def check_binary(self, upload, binary):
        """Check a single binary package for syntax errors.

        @type  binary: L{daklib.upload.Binary} (presumably -- verify against caller)
        @param binary: binary package to check

        @raise Reject: the package has a syntax error
        """
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # fixed: the original formatted the filename twice ({0} {0})
                # and never used the field name
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename
        # NOTE(review): assumes fn already matched re_file_binary when the
        # upload was parsed; match is not guarded against None here.
        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    # Built-Using takes source-package relations
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        class TarTime(object):
            """tar visitor collecting member names with out-of-range mtimes"""
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            """build a human-readable rejection message for one package"""
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += "  {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            # walk the control tarball and record offending timestamps
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        """Check that a source file's name agrees with the .dsc fields.

        @raise Reject: the filename does not match Source or Version
        """
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                # fixed: second placeholder was {0} again, so the version
                # argument was never shown in the message
                raise Reject('{0}: Source package includes upstream tarball, but version {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        # binary-only uploads have nothing to check here
        if upload.changes.source is None:
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))
class SingleDistributionCheck(Check):
    """Ensure the .changes file targets exactly one distribution."""
    def check(self, upload):
        distributions = upload.changes.distributions
        if len(distributions) == 1:
            return
        raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        """Return (True, binary, other_source) if the upload takes over a
        binary currently built from a different source package."""
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """Check an upload against a single ACL.

        @return: C{(None, None)} if the ACL does not match this upload,
                 C{(False, reason)} if the ACL rejects it,
                 C{(True, None)} if the ACL accepts it.
        """
        source_name = upload.changes.source_name

        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring
        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        # a fingerprint-specific ACL overrides the keyring's ACL
        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if result == False:
            raise RejectACL(acl, reason)

        # global ACLs apply to every upload in addition to the one above
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        # NOTE(review): reconstructed control flow -- the upload must be
        # accepted by at least one of the suite's ACLs (if any are set);
        # verify against daklib history.
        acls = suite.acls
        if len(acls) == 0:
            return True

        accept = False
        for acl in acls:
            result, reason = self._check_acl(upload.session, upload, acl)
            if result == False:
                raise RejectACL(acl, reason)
            accept = accept or result
        if not accept:
            raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        # transitions only affect sourceful uploads
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """Return the parsed transitions mapping, or None when the file is
        not configured, missing, or broken."""
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        # use a context manager so the file is closed even on errors
        # (the original used the py2 file() builtin and leaked the handle)
        with open(path, 'r') as fd:
            contents = fd.read()
        try:
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.

    Source-only uploads are only accepted for source packages having a
    Package-List field that also lists architectures per package. This
    check can be disabled via
    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.

    Source-only uploads to NEW are only allowed if
    Dinstall::AllowSourceOnlyNew is set.

    Uploads not including architecture-independent packages are only
    allowed if Dinstall::AllowNoArchIndepUploads is set.
    """
    def is_source_only_upload(self, upload):
        """Return True if the upload contains a source package and no binaries."""
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            return True
        return False

    def check(self, upload):
        if not self.is_source_only_upload(upload):
            return True

        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads')
        changes = upload.changes

        if not allow_source_only_uploads:
            raise Reject('Source-only uploads are not allowed.')
        if not allow_source_only_uploads_without_package_list \
           and changes.source.package_list.fallback:
            raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
        if not allow_source_only_new and upload.new:
            raise Reject('Source-only uploads to NEW are not allowed.')

        if not allow_no_arch_indep_uploads \
           and 'all' not in changes.architectures \
           and changes.source.package_list.has_arch_indep_packages():
            raise Reject('Uploads not including architecture-independent packages are not allowed.')

        return True
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # write the interesting tags to a temp file lintian can read
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            for tag in tags:
                temptagfile.write('%s\n' % tag)
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)

        try:
            cmd = []
            result = 0

            user = cnf.get('Dinstall::UnprivUser') or None
            if user is not None:
                # drop privileges before running lintian
                cmd.extend(['sudo', '-H', '-u', user])

            cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
            output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            # a non-zero exit just means lintian found (or hit) something;
            # keep its output for parsing below
            result = e.returncode
            output = e.output
        finally:
            # always remove the temporary tag file, even if lintian crashed
            os.unlink(temp_filename)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        # binary-only uploads carry no source format to verify
        if source is None:
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    """Reject architectures the target suite does not carry."""
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            known = session.query(Architecture) \
                           .filter_by(arch_string=arch) \
                           .filter(Architecture.suites.contains(suite)) \
                           .first()
            if known is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        """Return the highest version of C{source_name} in C{suite}, or None."""
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        else:
            return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        """Return the highest version of C{binary_name} in C{suite} for the
        given architecture (or 'all'), or None."""
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        else:
            return db_binary.version

    def _version_checks(self, upload, suite, other_suite, op, op_name):
        """Reject the upload unless every included source/binary version
        satisfies C{op(version_compare(new, existing))} against the highest
        version already in C{other_suite}."""
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]

        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')