1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module providing pre-acceptance tests

Please read the documentation for the L{Check} class for the interface.
"""
import errno
import os
import subprocess
import textwrap
import time

import apt_inst
import apt_pkg
from apt_pkg import version_compare
import yaml

from daklib.config import Config
import daklib.daksubprocess
from daklib.dbconn import *
import daklib.dbconn as dbconn
from daklib.regexes import *
from daklib.textutils import fix_maintainer, ParseMaintError
import daklib.lintian as lintian
import daklib.utils as utils
from daklib.upload import InvalidHashException
def check_fields_for_valid_utf8(filename, control):
    """Check all fields of a control file for valid UTF-8

    @type  filename: str
    @param filename: name of the control file (only used in the reject message)

    @param control: mapping of field names to raw field values

    @raise Reject: if any field value fails to decode as UTF-8
    """
    for field in control.keys():
        try:
            control[field].decode('utf-8')
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))
class Reject(Exception):
    """Exception raised by failing checks.

    The exception message carries the human-readable reason why the
    upload should be rejected.
    """
class RejectStupidMaintainerException(Exception):
    """exception raised by failing the external hashes check

    Constructed with four positional arguments: filename, the name of the
    mismatching attribute, the value from the current upload, and the value
    recorded in the external files database.
    """

    def __str__(self):
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]
class RejectACL(Reject):
    """exception raised by failing ACL checks"""
    def __init__(self, acl, reason):
        self.acl = acl          # the ACL object that rejected the upload
        self.reason = reason    # human-readable reason for the rejection

    def __str__(self):
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
class Check(object):
    """base class for checks

    checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
    raise a L{daklib.checks.Reject} exception including a human-readable
    description why the upload should be rejected.
    """
    def check(self, upload):
        """do checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError

    def per_suite_check(self, upload, suite):
        """do per-suite checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError

    @property
    def forcable(self):
        """allow to force ignore failing test

        C{True} if it is acceptable to force ignoring a failing test,
        C{False} otherwise
        """
        return False
class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check_replay(self, upload):
        """Reject if this exact signed .changes file was already seen.

        Looks up the signature in the recorded signature history so the same
        signed .changes cannot be replayed.
        """
        session = upload.session
        history = SignatureHistory.from_signed_file(upload.changes)
        r = history.query(session)
        if r is not None:
            raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
        return True

    def check(self, upload):
        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self.check_replay(upload)
        self._check_hashes(upload, changes.filename, changes.files.itervalues())

        source = None
        try:
            # Accessing changes.source parses the .dsc; any parse error
            # means the upload must be rejected.
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())

        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

    def _check_hashes(self, upload, filename, files):
        """Make sure hashes match existing files

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload we are processing

        @type  filename: str
        @param filename: name of the file the expected hash values are taken from

        @type  files: sequence of L{daklib.upload.HashedFile}
        @param files: files to check the hashes for
        """
        try:
            for f in files:
                f.check(upload.directory)
        except IOError as e:
            if e.errno == errno.ENOENT:
                raise Reject('{0} refers to non-existing file: {1}\n'
                             'Perhaps you need to include it in your upload?'
                             .format(filename, os.path.basename(e.filename)))
            raise
        except InvalidHashException as e:
            raise Reject('{0}: {1}'.format(filename, unicode(e)))
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        # The filename must agree with the Source and Version fields.
        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # Architecture list and presence of a source package must agree.
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        # Use a bound parameter: f.filename comes from the upload and must
        # not be interpolated into the SQL string (SQL injection).
        # NOTE(review): LIKE wildcards ('%', '_') inside the filename are not
        # escaped here, matching the original behaviour.
        q = session.execute(
            "SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern",
            {'pattern': '%/' + f.filename})
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        if not ext_size:
            # File unknown to the external database; nothing to compare.
            return

        if ext_size != f.size:
            raise RejectStupidMaintainerException(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectStupidMaintainerException(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectStupidMaintainerException(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectStupidMaintainerException(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        cnf = Config()

        # This whole check is optional and off unless configured.
        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # Every uploaded .deb must be listed in the Binary field.
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # Fixed format string: the field name goes into {1}, not {0}.
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if not match:
            # Guard against None so a malformed filename gives a clean
            # reject instead of an AttributeError below.
            raise Reject('{0}: Does not match re_file_binary'.format(fn))
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    # Built-Using refers to source packages, so use the
                    # source-dependency parser.
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        class TarTime(object):
            """Collects members whose mtime falls outside the cutoffs."""
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            """Build a reject message listing the offending members."""
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += " {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                # Fixed format string: the version goes into {1}, not a
                # second {0}.
                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        if upload.changes.source is None:
            # Binary-only upload: nothing to do here.
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
class SingleDistributionCheck(Check):
    """Ensure the .changes file targets exactly one distribution."""
    def check(self, upload):
        distributions = upload.changes.distributions
        if len(distributions) == 1:
            return
        raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """Evaluate a single ACL against the upload.

        @return: (result, reason) — result is C{True} (accepted),
                 C{False} (rejected, with reason) or C{None} (ACL does
                 not apply to this upload)
        """
        source_name = upload.changes.source_name

        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if not result:
            raise RejectACL(acl, reason)

        # Global ACLs apply to everybody; a False result rejects,
        # while None (not applicable) is ignored.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        acls = suite.acls
        if len(acls) != 0:
            # At least one of the suite's ACLs must accept the upload.
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result == False:
                    raise RejectACL(acl, reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        # Transitions only ever block sourceful uploads.
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected,t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """Load the transitions file configured via Dinstall::ReleaseTransitions.

        @return: parsed transitions mapping, or C{None} if unconfigured,
                 missing or unparsable
        """
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        try:
            # Use a context manager so the file handle is always closed.
            with open(path, 'r') as fh:
                contents = fh.read()
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.

    Source-only uploads are only accepted for source packages having a
    Package-List field that also lists architectures per package. This
    check can be disabled via
    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.

    Source-only uploads to NEW are only allowed if
    Dinstall::AllowSourceOnlyNew is set.

    Uploads not including architecture-independent packages are only
    allowed if Dinstall::AllowNoArchIndepUploads is set.
    """
    def is_source_only_upload(self, upload):
        """Return C{True} if the upload has a source package and no binaries."""
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            return True
        return False

    def check(self, upload):
        if not self.is_source_only_upload(upload):
            return True

        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads')
        changes = upload.changes

        if not allow_source_only_uploads:
            raise Reject('Source-only uploads are not allowed.')
        if not allow_source_only_uploads_without_package_list \
           and changes.source.package_list.fallback:
            raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
        if not allow_source_only_new and upload.new:
            raise Reject('Source-only uploads to NEW are not allowed.')

        if not allow_no_arch_indep_uploads \
           and 'all' not in changes.architectures \
           and changes.source.package_list.has_arch_indep_packages():
            raise Reject('Uploads not including architecture-independent packages are not allowed.')

        return True
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # Write the tags we care about to a temporary file for lintian.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            for tag in tags:
                # write() instead of the py2-only "print >>" statement.
                temptagfile.write('%s\n' % tag)
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)

        cmd = []
        result = 0

        user = cnf.get('Dinstall::UnprivUser') or None
        if user is not None:
            cmd.extend(['sudo', '-H', '-u', user])

        cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
        try:
            output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
            result = 0
        except subprocess.CalledProcessError as e:
            output = e.output
            result = e.returncode
        finally:
            # Remove the temporary tag file even if invoking lintian failed
            # unexpectedly.
            os.unlink(temp_filename)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        if source is None:
            # Binary-only upload: no source format to check.
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
        return True
class SuiteArchitectureCheck(Check):
    """Check that all uploaded architectures exist in the target suite."""
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
            if query.first() is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        # Return True on success for consistency with the other checks.
        return True
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        """Return the highest version of C{source_name} in C{suite}, or None."""
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        else:
            return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        """Return the highest version of C{binary_name} in C{suite} for
        C{architecture} (or 'all'), or None."""
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        else:
            return db_binary.version

    def _version_checks(self, upload, suite, other_suite, op, op_name):
        """Reject unless every uploaded version satisfies C{op} relative to
        the highest version present in C{other_suite}."""
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]

        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')

        return True