1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""module providing pre-acceptance tests

Please read the documentation for the L{Check} class for the interface.
"""
import datetime
import os
import subprocess
import textwrap
import time

import apt_inst
import apt_pkg
from apt_pkg import version_compare
import yaml

from daklib.config import Config
import daklib.daksubprocess
from daklib.dbconn import *
import daklib.dbconn as dbconn
import daklib.lintian as lintian
from daklib.regexes import *
from daklib.textutils import fix_maintainer, ParseMaintError
import daklib.upload
import daklib.utils as utils
def check_fields_for_valid_utf8(filename, control):
    """Check that every field of a control file decodes as valid UTF-8.

    @type  filename: str
    @param filename: name of the control file, used in the rejection message

    @param control: mapping of field names to raw (byte string) values

    @raise Reject: a field does not decode as UTF-8
    """
    for field in control.keys():
        value = control[field]
        try:
            value.decode('utf-8')
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))
class Reject(Exception):
    """Raised by a failing check to reject the upload being processed."""
class RejectStupidMaintainerException(Exception):
    """exception raised by failing the external hashes check"""

    def __str__(self):
        # args layout: (filename, hash name, our value, external value)
        filename, hash_name, current, external = self.args[:4]
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % (filename, hash_name, current, external)
class RejectACL(Reject):
    """exception raised by failing ACL checks"""

    def __init__(self, acl, reason):
        # Remember which ACL failed and why so __str__ can report both.
        self.acl = acl
        self.reason = reason

    def __str__(self):
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
class Check(object):
    """base class for checks

    checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
    raise a L{daklib.checks.Reject} exception including a human-readable
    description why the upload should be rejected.
    """
    def check(self, upload):
        """do checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        # Subclasses must override; raise NotImplementedError (raising the
        # NotImplemented singleton would itself be a TypeError).
        raise NotImplementedError

    def per_suite_check(self, upload, suite):
        """do per-suite checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError

    @property
    def forcable(self):
        """allow to force ignore failing test

        C{True} if it is acceptable to force ignoring a failing test,
        C{False} otherwise
        """
        return False
class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    # NOTE(review): this docstring used to sit between two methods, where it
    # is a no-op expression and leaves __doc__ unset; moved to the class top.

    def check_replay(self, upload):
        # Use private session as we want to remember having seen the .changes
        # file even if the upload is rejected later on.
        session = upload.session
        history = SignatureHistory.from_signed_file(upload.changes)
        r = history.query(session)
        if r is not None:
            raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
        return True

    def check(self, upload):
        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self.check_replay(upload)
        self._check_hashes(upload, changes.filename, changes.files.itervalues())

        source = None
        try:
            source = changes.source
        except Exception as e:
            # Parsing the .dsc can fail; treat that as an invalid upload.
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())

        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

    def _check_hashes(self, upload, filename, files):
        """Make sure hashes match existing files

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload we are processing

        @type  filename: str
        @param filename: name of the file the expected hash values are taken from

        @type  files: sequence of L{daklib.upload.HashedFile}
        @param files: files to check the hashes for
        """
        try:
            for f in files:
                f.check(upload.directory)
        except daklib.upload.FileDoesNotExist as e:
            raise Reject('{0}: {1}\n'
                         'Perhaps you need to include the file in your upload?'
                         .format(filename, unicode(e)))
        except daklib.upload.UploadException as e:
            raise Reject('{0}: {1}'.format(filename, unicode(e)))
class SignatureTimestampCheck(Check):
    """Check timestamp of .changes signature"""
    def check(self, upload):
        changes = upload.changes

        now = datetime.datetime.utcnow()
        timestamp = changes.signature_timestamp
        age = now - timestamp

        # Accept signatures up to a year old and up to a week in the future
        # (to allow for some clock skew).
        age_max = datetime.timedelta(days=365)
        age_min = datetime.timedelta(days=-7)

        if age > age_max:
            raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, age_max.days))
        if age < age_min:
            raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(age_min.days)))

        return True
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        mandatory_fields = ('Distribution', 'Source', 'Binary', 'Architecture',
                            'Version', 'Maintainer', 'Files', 'Changes', 'Description')
        for field in mandatory_fields:
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # Source and Version fields must be well-formed ...
        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        # ... and must agree with the .changes filename.
        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # Architecture list and included files must be consistent.
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        # Use a bound parameter instead of interpolating the (uploader
        # controlled) filename directly into the SQL statement.
        q = session.execute(
            "SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern",
            {'pattern': '%/' + f.filename})
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        # Not known to the external db: nothing to compare against.
        if not ext_size:
            return

        if ext_size != f.size:
            raise RejectStupidMaintainerException(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectStupidMaintainerException(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectStupidMaintainerException(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectStupidMaintainerException(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        cnf = Config()

        # The external files db is optional; skip the check unless enabled.
        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # Every included binary must be announced in the Binary field.
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        """Check a single binary package for syntax errors.

        @param binary: binary package to check (control fields + hashed file)

        @raise Reject: the binary package is broken
        """
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
            if field not in control:
                # was '{0}' twice, which printed the filename instead of the field name
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if match is None:
            # guard added: a non-matching filename used to crash with AttributeError
            raise Reject('{0}: does not match re_file_binary'.format(fn))
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        class TarTime(object):
            # Collects members whose mtime lies outside the allowed window.
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += "  {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                # was '{0}' twice: the version argument was never printed
                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        if upload.changes.source is None:
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        # Multi-distribution uploads are not supported by this archive.
        if len(upload.changes.distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """Evaluate a single ACL.

        Returns (None, None) when the ACL does not apply to this upload,
        otherwise (True/False, reason).
        """
        source_name = upload.changes.source_name

        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if result == False:
            raise RejectACL(acl, reason)

        # Global ACLs apply to every upload, whatever key signed it.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        acls = suite.acls
        if len(acls) != 0:
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result == False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        # Transitions only affect sourceful uploads.
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected,t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """Load the transition descriptions from the configured YAML file.

        @rtype:  dict or None
        @return: parsed transitions, or None if not configured or unreadable
        """
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        # Close the file handle deterministically (was: file(path, 'r').read(),
        # which leaked the handle until garbage collection).
        with open(path, 'r') as fh:
            contents = fh.read()
        try:
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.

    Source-only uploads are only accepted for source packages having a
    Package-List field that also lists architectures per package. This
    check can be disabled via
    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.

    Source-only uploads to NEW are only allowed if
    Dinstall::AllowSourceOnlyNew is set.

    Uploads not including architecture-independent packages are only
    allowed if Dinstall::AllowNoArchIndepUploads is set.
    """
    # NOTE(review): this docstring used to sit between the methods, where it
    # is a no-op expression and leaves __doc__ unset; moved to the class top.

    def is_source_only_upload(self, upload):
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            return True
        return False

    def check(self, upload):
        if not self.is_source_only_upload(upload):
            return True

        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads')
        changes = upload.changes

        if not allow_source_only_uploads:
            raise Reject('Source-only uploads are not allowed.')
        if not allow_source_only_uploads_without_package_list \
           and changes.source.package_list.fallback:
            raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
        if not allow_source_only_new and upload.new:
            raise Reject('Source-only uploads to NEW are not allowed.')

        if not allow_no_arch_indep_uploads \
           and 'all' not in changes.architectures \
           and changes.source.package_list.has_arch_indep_packages():
            raise Reject('Uploads not including architecture-independent packages are not allowed.')

        return True
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # Write the tags lintian should look for to a temporary file; make
        # sure it is closed even if writing fails.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        try:
            for tags in lintiantags.itervalues():
                for tag in tags:
                    temptagfile.write("%s\n" % tag)
        finally:
            temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)

        cmd = []
        result = 0

        user = cnf.get('Dinstall::UnprivUser') or None
        if user is not None:
            cmd.extend(['sudo', '-H', '-u', user])

        cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
        try:
            output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            result = e.returncode
            output = e.output
        finally:
            # Always remove the temporary tag file, even on unexpected errors.
            os.unlink(temp_filename)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        # Binary-only uploads have no source format to validate.
        if source is None:
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format) \
            .filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    """Reject architectures the target suite does not include."""
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch) \
                .filter(Architecture.suites.contains(suite))
            if query.first() is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        # Highest version of `source_name` in `suite`, or None if absent.
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        # Highest version of `binary_name` in `suite` for the given
        # architecture (arch:all counts too), or None if absent.
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        return db_binary.version

    def _version_checks(self, upload, suite, other_suite, op, op_name):
        # Compare the uploaded versions against `other_suite`; `op` decides
        # whether the comparison result is acceptable.
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]

        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')