1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21 """module provided pre-acceptance tests
23 Please read the documentation for the L{Check} class for the interface.
26 from daklib.config import Config
27 import daklib.daksubprocess
28 from daklib.dbconn import *
29 import daklib.dbconn as dbconn
30 from daklib.regexes import *
31 from daklib.textutils import fix_maintainer, ParseMaintError
32 import daklib.lintian as lintian
33 import daklib.utils as utils
34 from daklib.upload import InvalidHashException
38 from apt_pkg import version_compare
def check_fields_for_valid_utf8(filename, control):
    """Check all fields of a control file for valid UTF-8.

    @type  filename: str
    @param filename: name of the control file, used in the error message

    @param control: mapping of field names to (byte string) field values

    @raise Reject: if a field value does not decode as UTF-8
    """
    for field in control.keys():
        # The listing showed an `except` without its `try:`; restore the
        # try statement so the orphaned except clause is valid again.
        try:
            control[field].decode('utf-8')
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))
class Reject(Exception):
    """exception raised by failing checks

    The exception message is a human-readable description of why the
    upload is rejected.
    """
class RejectStupidMaintainerException(Exception):
    """exception raised by failing the external hashes check

    Constructed with four positional arguments:
    (filename, hash-name, current value, external value).
    """

    # The listing showed a bare `return` at class level; restore the
    # `__str__` header so the message formatting is reachable again.
    def __str__(self):
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]
class RejectACL(Reject):
    """exception raised by failing ACL checks"""
    def __init__(self, acl, reason):
        """
        @param acl: the ACL object that rejected the upload
        @param reason: human-readable reason for the rejection
        """
        # The listing showed `__init__` without a body while the string
        # formatting below reads self.acl and self.reason; restore the
        # attribute assignments and the `__str__` header.
        self.acl = acl
        self.reason = reason

    def __str__(self):
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
75 """base class for checks
77 checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
78 raise a L{daklib.checks.Reject} exception including a human-readable
79 description why the upload should be rejected.
81 def check(self, upload):
84 @type upload: L{daklib.archive.ArchiveUpload}
85 @param upload: upload to check
87 @raise daklib.checks.Reject: upload should be rejected
90 def per_suite_check(self, upload, suite):
91 """do per-suite checks
93 @type upload: L{daklib.archive.ArchiveUpload}
94 @param upload: upload to check
96 @type suite: L{daklib.dbconn.Suite}
97 @param suite: suite to check
99 @raise daklib.checks.Reject: upload should be rejected
104 """allow to force ignore failing test
106 C{True} if it is acceptable to force ignoring a failing test,
class SignatureAndHashesCheck(Check):
    def check_replay(self, upload):
        # Replay protection: reject if this exact signed .changes was
        # already processed (tracked via SignatureHistory).
        # Use private session as we want to remember having seen the .changes
        session = DBConn().session()
        history = SignatureHistory.from_signed_file(upload.changes)
        r = history.query(session)
        # NOTE(review): the guard on the query result (e.g.
        # `if r is not None:`) appears to be elided from this listing.
            raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check(self, upload):
        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self.check_replay(upload)
        self._check_hashes(upload, changes.filename, changes.files.itervalues())
        # Accessing changes.source may parse the .dsc; any failure there is
        # turned into a rejection.
        # NOTE(review): the enclosing `try:` for the except below appears to
        # be elided from this listing.
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            # .changes and .dsc must come from the same signing key.
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())
        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")
    """Make sure hashes match existing files

    @type  upload: L{daklib.archive.ArchiveUpload}
    @param upload: upload we are processing

    @type  filename: str
    @param filename: name of the file the expected hash values are taken from

    @type  files: sequence of L{daklib.upload.HashedFile}
    @param files: files to check the hashes for
    """
    def _check_hashes(self, upload, filename, files):
        # Verify size and checksums of every file against the values
        # recorded in `filename` (.changes or .dsc).
        # NOTE(review): the enclosing `try:`/`for f in files:` lines and the
        # `except IOError as e:` appear to be elided from this listing.
                f.check(upload.directory)
            if e.errno == errno.ENOENT:
                raise Reject('{0} refers to non-existing file: {1}\n'
                             'Perhaps you need to include it in your upload?'
                             .format(filename, os.path.basename(e.filename)))
        except InvalidHashException as e:
            raise Reject('{0}: {1}'.format(filename, unicode(e)))
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename
        # Every .changes must carry all of these fields.
        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))
        check_fields_for_valid_utf8(fn, control)
        source_match = re_field_source.match(control['Source'])
        # NOTE(review): the `if not source_match:` guard appears to be
        # elided from this listing.
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')
        # The filename must agree with the Source and Version fields; the
        # epoch is never part of the filename.
        match = re_file_changes.match(fn)
        # NOTE(review): the `if not match:` guard appears to be elided here.
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))
        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))
        # Architecture list and presence of a source package must agree.
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")
        # NOTE(review): the enclosing `try:` for the except below appears to
        # be elided from this listing.
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))
        # Changed-By is optional, but must parse when present.
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))
        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")
        # Closes must only reference bug numbers.
        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        # NOTE(review): f.filename is interpolated directly into the SQL
        # string; prefer a bound parameter to avoid SQL injection.
        q = session.execute("SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE '%%/%s'" % f.filename)
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)
        # Any mismatch with the externally recorded values is fatal.
        if ext_size != f.size:
            raise RejectStupidMaintainerException(f.filename, 'size', f.size, ext_size)
        if ext_md5sum != f.md5sum:
            raise RejectStupidMaintainerException(f.filename, 'md5sum', f.md5sum, ext_md5sum)
        if ext_sha1sum != f.sha1sum:
            raise RejectStupidMaintainerException(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)
        if ext_sha256sum != f.sha256sum:
            raise RejectStupidMaintainerException(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)
    def check(self, upload):
        # Only run when the external-files database is enabled in the
        # configuration.
        # NOTE(review): the assignment of `cnf` and the body of the guard
        # below (presumably an early return) appear to be elided from this
        # listing.
        if not cnf.use_extfiles:
        session = upload.session
        changes = upload.changes
        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)
        # Every uploaded binary must be listed in the changes' Binary field.
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))
    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control
        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # NOTE(review): both placeholders are {0}; the second should
                # probably be {1} so the missing field name is reported.
                raise Reject('{0}: Missing mandatory field {0}.'.format(fn, field))
        check_fields_for_valid_utf8(fn, control)
        # check fields
        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))
        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')
        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))
        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))
        # check filename: package, version (without epoch) and architecture
        # must all agree with the control fields.
        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))
        # check dependency field syntax
        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                # NOTE(review): the `try:`/`except` skeleton around the parse
                # below appears to be elided from this listing.
                    apt_pkg.parse_depends(value)
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
        # Built-Using uses source-dependency syntax.
        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                # NOTE(review): `try:`/`except` elided here as well.
                    apt_pkg.parse_src_depends(value)
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        # Cutoffs come from the configuration; defaults are a 24h grace
        # period into the future and nothing older than 1975.
        # NOTE(review): the assignment of `cnf` appears to be elided from
        # this listing.
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))
        # Tar visitor collecting member names whose mtime falls outside the
        # allowed [past_cutoff, future_cutoff] window.
        class TarTime(object):
            # NOTE(review): the `def __init__(self):` header appears to be
            # elided from this listing.
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime
        def format_reason(filename, direction, files):
            # Build the human-readable rejection message listing offenders.
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += " {0} ({1})".format(fn, time.ctime(ts))
        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            # NOTE(review): the construction of `tar` (a TarTime) and the
            # guards in front of the raises below appear to be elided.
            deb.control.go(tar.callback)
                raise Reject(format_reason(filename, 'future', tar.future_files))
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        match = re_file_orig.match(filename)
        # NOTE(review): the branch falling back to `regex` when the .orig
        # pattern does not match appears to be partially elided here.
            match = regex.match(filename)
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))
        version = control['Version']
        # For an .orig tarball only the upstream part of the version is
        # compared against the filename.
        # NOTE(review): the `if` guarding this branch appears to be elided;
        # also, the message below uses {0} twice but passes (filename,
        # version) — the second placeholder should probably be {1}.
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                raise Reject('{0}: Source package includes upstream tarball, but {0} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))
    def check(self, upload):
        # Nothing to do for binary-only uploads.
        if upload.changes.source is None:
        changes = upload.changes.changes
        source = upload.changes.source
        # NOTE(review): the assignment of `control` (the parsed .dsc) and the
        # body of the guard above appear to be elided from this listing.
        dsc_fn = source._dsc_file.filename
        check_fields_for_valid_utf8(dsc_fn, control)
        # check fields: .dsc and .changes must agree on Source and Version.
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))
        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)
        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                # NOTE(review): the `try:` for the except below appears to be
                # elided from this listing.
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))
        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        # NOTE(review): the guard on `rejects` being non-empty appears to be
        # elided from this listing.
            raise Reject("\n".join(rejects))
class SingleDistributionCheck(Check):
    """Ensure the .changes file targets exactly one distribution."""
    def check(self, upload):
        distributions = upload.changes.distributions
        if len(distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""
    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                # Same binary name already built from a different source
                # package in this suite -> hijack.
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None
    def _check_acl(self, session, upload, acl):
        # Evaluate one ACL; returns a (result, reason) pair.
        # NOTE(review): several early returns (bodies of the two match
        # guards below, the final success return) appear to be elided from
        # this listing.
        source_name = upload.changes.source_name
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
        if not acl.allow_new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                # Only explicitly allowed architectures may be uploaded.
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                # NOTE(review): the `if does_hijack:` guard appears elided.
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)
        # Per-source ACL entries are either required (allow_per_source) or
        # act as a deny list (deny_per_source).
        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)
    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring
        # NOTE(review): the guards (`if keyring is None:`, `if acl is
        # None:`, `if not result:`) in front of several raises below appear
        # to be elided from this listing.
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))
        # A fingerprint-specific ACL takes precedence over the keyring's.
        acl = fingerprint.acl or keyring.acl
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
            raise RejectACL(acl, reason)
        # Global ACLs apply to every upload in addition to the key's ACL.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
                raise RejectACL(acl, reason)
    def per_suite_check(self, upload, suite):
        # An upload must be accepted by at least one per-suite ACL.
        # NOTE(review): the surrounding loop over the suite's ACLs and the
        # guards around the raise appear to be elided from this listing.
                result, reason = self._check_acl(upload.session, upload, acl)
                accept = accept or result
            raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        # Transitions only affect sourceful uploads.
        if 'source' not in upload.changes.architectures:
        transitions = self.get_transitions()
        if transitions is None:
        # NOTE(review): the early-return bodies of the two guards above
        # appear to be elided from this listing.
        session = upload.session
        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')
        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            # NOTE(review): the assignment of `expected` (the version the
            # transition waits for, presumably t["new"]) appears to be
            # elided from this listing.
            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)
            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet
                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.
                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)
                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                        # NOTE(review): the `else:` for the assignment below
                        # appears to be elided from this listing.
                        currentlymsg = "not present in testing"
                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])
                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected,t["rm"])))
                    raise Reject(rejectmsg)
    def get_transitions(self):
        # Load the transitions YAML named by Dinstall::ReleaseTransitions.
        # NOTE(review): the assignment of `cnf` and the early/final return
        # statements appear to be elided from this listing.
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
        # NOTE(review): `file()` is the Python 2 builtin; the file object is
        # never closed explicitly.
        contents = file(path, 'r').read()
        # NOTE(review): the `try:` for the except below appears elided.
            transitions = yaml.safe_load(contents)
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))
class NoSourceOnlyCheck(Check):
    def is_source_only_upload(self, upload):
        # An upload is source-only when it ships a source package and no
        # binary packages.
        # NOTE(review): the return statements of this method appear to be
        # elided from this listing.
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.

    Source-only uploads are only accepted for source packages having a
    Package-List field that also lists architectures per package. This
    check can be disabled via
    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.

    Source-only uploads to NEW are only allowed if
    Dinstall::AllowSourceOnlyNew is set.

    Uploads not including architecture-independent packages are only
    allowed if Dinstall::AllowNoArchIndepUploads is set.
    """
    def check(self, upload):
        # Binary-including uploads are not this check's business.
        if not self.is_source_only_upload(upload):
        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads')
        changes = upload.changes
        if not allow_source_only_uploads:
            raise Reject('Source-only uploads are not allowed.')
        # package_list.fallback is set when the .dsc carried no usable
        # Package-List field.
        if not allow_source_only_uploads_without_package_list \
                and changes.source.package_list.fallback:
            raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
        if not allow_source_only_new and upload.new:
            raise Reject('Source-only uploads to NEW are not allowed.')
        if not allow_no_arch_indep_uploads \
                and 'all' not in changes.architectures \
                and changes.source.package_list.has_arch_indep_packages():
            raise Reject('Uploads not including architecture-independent packages are not allowed.')
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes
        # Only check sourceful uploads.
        if changes.source is None:
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
        # NOTE(review): the early-return bodies of the guards above/below
        # and the assignment of `cnf` appear to be elided from this listing.
        if 'Dinstall::LintianTags' not in cnf:
        tagfile = cnf['Dinstall::LintianTags']
        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        # NOTE(review): the `try:` for the except below appears elided.
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))
        # Write all tags of interest to a temporary file that lintian
        # consumes via --tags-from-file.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            # NOTE(review): the inner `for tag in tags:` loop header and the
            # close of the temp file appear to be elided from this listing.
                print >>temptagfile, tag
        changespath = os.path.join(upload.directory, changes.filename)
        # Run lintian, dropping privileges when Dinstall::UnprivUser is set.
        # NOTE(review): the `try:`, the initialisation of `cmd`/`result`,
        # the user guard, the `output = e.output` line and the `finally:`
        # appear to be elided from this listing.
            user = cnf.get('Dinstall::UnprivUser') or None
                cmd.extend(['sudo', '-H', '-u', user])
            cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
            output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            result = e.returncode
            os.unlink(temp_filename)
        # A lintian *failure* (as opposed to findings) is only warned about.
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))
        # Turn lintian's findings into rejection messages.
        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        # NOTE(review): the `if source is None:` early-return guard appears
        # to be elided from this listing.
        # The suite must explicitly carry the .dsc's Format.
        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    """Reject uploads for architectures the target suite does not carry."""
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            matching = session.query(Architecture) \
                .filter_by(arch_string=arch) \
                .filter(Architecture.suites.contains(suite))
            if matching.first() is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        # Highest version of `source_name` present in `suite`.
        # NOTE(review): the None-handling branch (return None / else:)
        # appears to be elided from this listing.
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return db_source.version
    def _highest_binary_version(self, session, binary_name, suite, architecture):
        # Highest version of `binary_name` in `suite` for this architecture
        # or 'all'.
        # NOTE(review): the None-handling branch appears to be elided here
        # as well.
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return db_binary.version
    def _version_checks(self, upload, suite, other_suite, op, op_name):
        # Compare every uploaded source and binary version against the
        # highest version in `other_suite`; `op` receives the result of
        # version_compare and must return True for acceptable uploads.
        session = upload.session
        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))
        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))
    def per_suite_check(self, upload, suite):
        session = upload.session
        # Suites referenced by MustBeNewerThan/Enhances checks, plus the
        # target suite itself, must all hold strictly older versions.
        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)
        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')
        # Conversely, MustBeOlderThan references must hold newer versions.
        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]
        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')