1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21 """module provided pre-acceptance tests
23 Please read the documentation for the L{Check} class for the interface.
26 from daklib.config import Config
27 import daklib.daksubprocess
28 from daklib.dbconn import *
29 import daklib.dbconn as dbconn
30 from daklib.regexes import *
31 from daklib.textutils import fix_maintainer, ParseMaintError
32 import daklib.lintian as lintian
33 import daklib.utils as utils
38 from apt_pkg import version_compare
def check_fields_for_valid_utf8(filename, control):
    """Check all fields of a control file for valid UTF-8

    @param filename: name of the control file, used in rejection messages
    @param control: mapping of field names to field values

    @raise Reject: a field value does not decode as UTF-8
    """
    for field in control.keys():
        # A field value that fails to decode invalidates the whole upload.
        control[field].decode('utf-8')
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))
class Reject(Exception):
    """Exception raised by failing checks to abort processing of an upload."""
class RejectExternalFilesMismatch(Reject):
    """exception raised by failing the external hashes check"""
        # __str__: self.args is (filename, hash-name, local-value, external-value).
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]
class RejectACL(Reject):
    """exception raised by failing ACL checks"""
    def __init__(self, acl, reason):
        # __str__: identify the failing ACL by name together with the reason.
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
76 """base class for checks
78 checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
79 raise a L{daklib.checks.Reject} exception including a human-readable
80 description why the upload should be rejected.
82 def check(self, upload):
85 @type upload: L{daklib.archive.ArchiveUpload}
86 @param upload: upload to check
88 @raise daklib.checks.Reject: upload should be rejected
91 def per_suite_check(self, upload, suite):
92 """do per-suite checks
94 @type upload: L{daklib.archive.ArchiveUpload}
95 @param upload: upload to check
97 @type suite: L{daklib.dbconn.Suite}
98 @param suite: suite to check
100 @raise daklib.checks.Reject: upload should be rejected
105 """allow to force ignore failing test
107 C{True} if it is acceptable to force ignoring a failing test,
class SignatureAndHashesCheck(Check):
    def check_replay(self, upload):
        # Use private session as we want to remember having seen the .changes
        session = upload.session
        history = SignatureHistory.from_signed_file(upload.changes)
        r = history.query(session)
        # A matching history entry means this exact signed .changes was
        # processed before (replay); re-uploading requires a fresh signature.
        raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check(self, upload):
        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self.check_replay(upload)
        # Hash values listed in the .changes must match the uploaded files.
        self._check_hashes(upload, changes.filename, changes.files.itervalues())
        source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            # A sourceful upload must be signed consistently: valid .dsc
            # signature made by the same key that signed the .changes.
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())
        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")
    """Make sure hashes match existing files

    @type upload: L{daklib.archive.ArchiveUpload}
    @param upload: upload we are processing

    @param filename: name of the file the expected hash values are taken from

    @type files: sequence of L{daklib.upload.HashedFile}
    @param files: files to check the hashes for
    """
    def _check_hashes(self, upload, filename, files):
        # Verify each file on disk against its expected hashes.
        f.check(upload.directory)
        except daklib.upload.FileDoesNotExist as e:
            raise Reject('{0}: {1}\n'
                         'Perhaps you need to include the file in your upload?'
                         .format(filename, unicode(e)))
        except daklib.upload.UploadException as e:
            raise Reject('{0}: {1}'.format(filename, unicode(e)))
class SignatureTimestampCheck(Check):
    """Check timestamp of .changes signature"""
    def check(self, upload):
        changes = upload.changes
        # Compare the signature timestamp with the current time (both UTC).
        now = datetime.datetime.utcnow()
        timestamp = changes.signature_timestamp
        age = now - timestamp
        # Window: signatures may be up to a year old and at most seven
        # days in the future (age_min is negative on purpose).
        age_max = datetime.timedelta(days=365)
        age_min = datetime.timedelta(days=-7)
        # Raised when age exceeds age_max:
        raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, age_max.days))
        # Raised when age is below age_min (signature from the future):
        raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(age_min.days)))
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename
        # Every one of these fields must be present in a .changes file.
        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))
        check_fields_for_valid_utf8(fn, control)
        # Source and Version must match the expected field syntax.
        source_match = re_field_source.match(control['Source'])
        raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')
        # The .changes filename itself must agree with Source and Version
        # (the epoch never appears in filenames).
        match = re_file_changes.match(fn)
        raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))
        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))
        # Architecture list and actual upload contents must agree on source.
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")
        # Maintainer and Changed-By must be parseable addresses.
        fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))
        changed_by = changes.changes.get('Changed-By')
        if changed_by is not None:
            fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))
        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")
        # Bug numbers listed in Closes must be numeric.
        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        # Look the file up by basename in the external_files table.
        # NOTE(review): f.filename comes from the upload and is interpolated
        # into the SQL string directly; consider a parameterized query.
        q = session.execute("SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE '%%/%s'" % f.filename)
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)
        # Any difference between the uploaded file and the external record
        # is a hard reject.
        if ext_size != f.size:
            raise RejectExternalFilesMismatch(f.filename, 'size', f.size, ext_size)
        if ext_md5sum != f.md5sum:
            raise RejectExternalFilesMismatch(f.filename, 'md5sum', f.md5sum, ext_md5sum)
        if ext_sha1sum != f.sha1sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)
        if ext_sha256sum != f.sha256sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)
    def check(self, upload):
        # Only run when external file checking is configured.
        if not cnf.use_extfiles:
        session = upload.session
        changes = upload.changes
        # Check every file from the .changes and, for sourceful uploads,
        # every file referenced by the .dsc.
        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        debug_deb_name_postfix = "-dbgsym"
        # XXX: Handle dynamic debug section name here

        # First check each binary package on its own.
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # Map package name -> binary for the cross-checks below.
        binaries = {binary.control['Package']: binary
                    for binary in upload.changes.binaries}

        for name, binary in binaries.items():
            if name in upload.changes.binary_names:
                # Package is listed in Binary field. Everything is good.
            elif daklib.utils.is_in_debug_section(binary.control):
                # If we have a binary package in the debug section, we
                # can allow it to not be present in the Binary field
                # in the .changes file, so long as its name (without
                # -dbgsym) is present in the Binary list.
                if not name.endswith(debug_deb_name_postfix):
                    raise Reject('Package {0} is in the debug section, but '
                                 'does not end in {1}.'.format(name, debug_deb_name_postfix))

                # Right, so, it's named properly, let's check that
                # the corresponding package is in the Binary list
                origin_package_name = name[:-len(debug_deb_name_postfix)]
                if origin_package_name not in upload.changes.binary_names:
                    "Debug package {debug}'s corresponding binary package "
                    "{origin} is not present in the Binary field.".format(
                        debug=name, origin=origin_package_name))
                # Someone was a nasty little hacker and put a package
                # into the .changes that isn't in debian/control. Bad,
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(name))
327 def check_binary(self, upload, binary):
328 fn = binary.hashed_file.filename
329 control = binary.control
331 for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
332 if field not in control:
333 raise Reject('{0}: Missing mandatory field {0}.'.format(fn, field))
335 check_fields_for_valid_utf8(fn, control)
339 package = control['Package']
340 if not re_field_package.match(package):
341 raise Reject('{0}: Invalid Package field'.format(fn))
343 version = control['Version']
344 version_match = re_field_version.match(version)
345 if not version_match:
346 raise Reject('{0}: Invalid Version field'.format(fn))
347 version_without_epoch = version_match.group('without_epoch')
349 architecture = control['Architecture']
350 if architecture not in upload.changes.architectures:
351 raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
352 if architecture == 'source':
353 raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))
355 source = control.get('Source')
356 if source is not None and not re_field_source.match(source):
357 raise Reject('{0}: Invalid Source field'.format(fn))
361 match = re_file_binary.match(fn)
362 if package != match.group('package'):
363 raise Reject('{0}: filename does not match Package field'.format(fn))
364 if version_without_epoch != match.group('version'):
365 raise Reject('{0}: filename does not match Version field'.format(fn))
366 if architecture != match.group('architecture'):
367 raise Reject('{0}: filename does not match Architecture field'.format(fn))
369 # check dependency field syntax
371 for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
372 'Provides', 'Recommends', 'Replaces', 'Suggests'):
373 value = control.get(field)
374 if value is not None:
375 if value.strip() == '':
376 raise Reject('{0}: empty {1} field'.format(fn, field))
378 apt_pkg.parse_depends(value)
380 raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
382 for field in ('Built-Using',):
383 value = control.get(field)
384 if value is not None:
385 if value.strip() == '':
386 raise Reject('{0}: empty {1} field'.format(fn, field))
388 apt_pkg.parse_src_depends(value)
390 raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
392 # "Multi-Arch: no" breaks wanna-build, #768353
393 multi_arch = control.get("Multi-Arch")
394 if multi_arch == 'no':
395 raise Reject('{0}: Multi-Arch: no support in Debian is broken (#768353)'.format(fn))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        # Allowed window: a configurable grace period into the future,
        # nothing older than the configured cutoff year (default 1975).
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        class TarTime(object):
            # Collects tar members whose mtime falls outside the window.
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            # Build a human-readable rejection message listing each offender.
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += " {0} ({1})".format(fn, time.ctime(ts))

        # Walk the control tarball of every uploaded .deb.
        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            deb.control.go(tar.callback)
            raise Reject(format_reason(filename, 'future', tar.future_files))
            raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        """Check that a source file's name matches the .dsc control fields.

        @param control: control fields of the .dsc
        @param filename: name of the file to check
        @param regex: regular expression the filename must match
            (used for files that are not .orig tarballs)

        @raise Reject: the filename does not match the control fields
        """
        # In case we have an .orig.tar.*, we have to strip the Debian revision
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if match is None:
            is_orig = False
            match = regex.match(filename)

        if match is None:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            # .orig tarballs carry only the upstream part of the version.
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                # {1} renders the version (the previous format string used
                # {0} twice, repeating the filename and dropping the version).
                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))
    def check(self, upload):
        # Nothing to check for binary-only uploads.
        if upload.changes.source is None:
        changes = upload.changes.changes
        source = upload.changes.source
        dsc_fn = source._dsc_file.filename
        # NOTE(review): `control` below appears to be the .dsc's control
        # fields (presumably source.dsc) — confirm against the full file.
        check_fields_for_valid_utf8(dsc_fn, control)
        # The .dsc must agree with the .changes on source name and version.
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))
        # Filenames of the .dsc and every referenced file must match the
        # source name and version.
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))
        # Delegate the per-file .dsc consistency checks to utils.
        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        raise Reject("\n".join(rejects))
class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        # Multiple (or zero) target distributions are not supported.
        distribution_count = len(upload.changes.distributions)
        if distribution_count != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        uploaded_source = upload.changes.changes['Source']
        for pkg_name in upload.changes.binary_names:
            query = (session.query(DBBinary)
                     .join(DBBinary.source)
                     .filter(DBBinary.suites.contains(suite))
                     .filter(DBBinary.package == pkg_name))
            for db_binary in query:
                # A binary of the same name built from a different source
                # package already in the suite means a hijack.
                if db_binary.source.source != uploaded_source:
                    return True, db_binary.package, db_binary.source.source
        return False, None, None
    def _check_acl(self, session, upload, acl):
        # Returns (result, reason): result False rejects with reason,
        # True accepts; fingerprint/keyring-restricted ACLs may not apply.
        source_name = upload.changes.source_name
        # ACLs restricted to a specific fingerprint or keyring only apply there.
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
        if not acl.allow_new:
            return False, "NEW uploads are not allowed"
        for f in upload.changes.files.itervalues():
            if f.section == 'byhand' or f.section.startswith("raw-"):
                return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                # Only architectures enabled for this ACL may be uploaded.
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)
        # Per-source ACL entries can both allow and deny individual source packages.
        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)
    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring
        # Uploads must come from an active, known keyring that has an ACL.
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))
        # The fingerprint's own ACL takes precedence over the keyring's.
        acl = fingerprint.acl or keyring.acl
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
            raise RejectACL(acl, reason)
        # Global ACLs apply to every upload in addition to the key's own ACL.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
                raise RejectACL(acl, reason)
    def per_suite_check(self, upload, suite):
        # NOTE(review): iterates the suite's ACLs (loop elided in this view);
        # the upload is accepted if any per-suite ACL accepts it.
        result, reason = self._check_acl(upload.session, upload, acl)
        accept = accept or result
        raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        # Only sourceful uploads can be blocked by a transition.
        if 'source' not in upload.changes.architectures:
        transitions = self.get_transitions()
        if transitions is None:
        session = upload.session
        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')
        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            # NOTE(review): `expected` below is presumably the version the
            # transition waits for (t["new"]); assignment elided in this view.
            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)
            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet
                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.
                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)
                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                        currentlymsg = "not present in testing"
                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])
                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected,t["rm"])))
                    raise Reject(rejectmsg)
661 def get_transitions(self):
663 path = cnf.get('Dinstall::ReleaseTransitions', '')
664 if path == '' or not os.path.exists(path):
667 contents = file(path, 'r').read()
669 transitions = yaml.safe_load(contents)
671 except yaml.YAMLError as msg:
672 utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))
class NoSourceOnlyCheck(Check):
    def is_source_only_upload(self, upload):
        # An upload is source-only when it ships a source package but no
        # binary packages.
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.

    Source-only uploads are only accepted for source packages having a
    Package-List field that also lists architectures per package. This
    check can be disabled via
    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.

    Source-only uploads to NEW are only allowed if
    Dinstall::AllowSourceOnlyNew is set.

    Uploads not including architecture-independent packages are only
    allowed if Dinstall::AllowNoArchIndepUploads is set.
    """
    def check(self, upload):
        if not self.is_source_only_upload(upload):
        # All four knobs come from the dak configuration.
        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads')
        changes = upload.changes
        if not allow_source_only_uploads:
            raise Reject('Source-only uploads are not allowed.')
        # package_list.fallback is set when the .dsc had no usable
        # Package-List field (pre dpkg 1.17.7).
        if not allow_source_only_uploads_without_package_list \
                and changes.source.package_list.fallback:
            raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
        if not allow_source_only_new and upload.new:
            raise Reject('Source-only uploads to NEW are not allowed.')
        if not allow_no_arch_indep_uploads \
                and 'all' not in changes.architectures \
                and 'experimental' not in changes.distributions \
                and 'unstable' not in changes.distributions \
                and changes.source.package_list.has_arch_indep_packages():
            raise Reject('Uploads not including architecture-independent packages are not allowed.')
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
        if 'Dinstall::LintianTags' not in cnf:
        tagfile = cnf['Dinstall::LintianTags']
        # The tag file is YAML with the tags under a top-level 'lintian' key.
        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # Write the tags to a temporary file for lintian's --tags-from-file.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            # NOTE(review): inner loop over `tags` yielding `tag` is elided
            # in this view — confirm against the full file.
            print >>temptagfile, tag

        changespath = os.path.join(upload.directory, changes.filename)
        # Optionally drop privileges via sudo when invoking lintian.
        user = cnf.get('Dinstall::UnprivUser') or None
        cmd.extend(['sudo', '-H', '-u', user])
        cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
        output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            result = e.returncode
        os.unlink(temp_filename)

        # A non-zero lintian exit is logged; rejection is decided from the
        # parsed tags below.
        utils.warn("lintian failed for %s [return code: %s]." % \
            (changespath, result))
        utils.warn(utils.prefix_multi_line_string(output, \
            " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        # The target suite must explicitly allow the package's source Format.
        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    def per_suite_check(self, upload, suite):
        """Reject architectures that are not enabled in the target suite."""
        db = upload.session
        for arch_name in upload.changes.architectures:
            match = (db.query(Architecture)
                     .filter_by(arch_string=arch_name)
                     .filter(Architecture.suites.contains(suite)))
            if match.first() is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch_name, suite.suite_name))
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        # Highest version of `source_name` currently in `suite`, if any.
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
        return db_source.version
    def _highest_binary_version(self, session, binary_name, suite, architecture):
        # Highest version of `binary_name` in `suite` for this architecture;
        # 'all' packages count as matching every architecture.
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
        return db_binary.version
832 def _version_checks(self, upload, suite, other_suite, op, op_name):
833 session = upload.session
835 if upload.changes.source is not None:
836 source_name = upload.changes.source.dsc['Source']
837 source_version = upload.changes.source.dsc['Version']
838 v = self._highest_source_version(session, source_name, other_suite)
839 if v is not None and not op(version_compare(source_version, v)):
840 raise Reject("Version check failed:\n"
841 "Your upload included the source package {0}, version {1},\n"
842 "however {3} already has version {2}.\n"
843 "Uploads to {5} must have a {4} version than present in {3}."
844 .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))
846 for binary in upload.changes.binaries:
847 binary_name = binary.control['Package']
848 binary_version = binary.control['Version']
849 architecture = binary.control['Architecture']
850 v = self._highest_binary_version(session, binary_name, other_suite, architecture)
851 if v is not None and not op(version_compare(binary_version, v)):
852 raise Reject("Version check failed:\n"
853 "Your upload included the binary package {0}, version {1}, for {2},\n"
854 "however {4} already has version {3}.\n"
855 "Uploads to {6} must have a {5} version than present in {4}."
856 .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))
858 def per_suite_check(self, upload, suite):
859 session = upload.session
861 vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
862 .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
863 must_be_newer_than = [ vc.reference for vc in vc_newer ]
864 # Must be newer than old versions in `suite`
865 must_be_newer_than.append(suite)
867 for s in must_be_newer_than:
868 self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')
870 vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
871 must_be_older_than = [ vc.reference for vc in vc_older ]
873 for s in must_be_older_than:
874 self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')