1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
21 """module provided pre-acceptance tests
23 Please read the documentation for the L{Check} class for the interface.
26 from daklib.config import Config
27 import daklib.daksubprocess
28 from daklib.dbconn import *
29 import daklib.dbconn as dbconn
30 from daklib.regexes import *
31 from daklib.textutils import fix_maintainer, ParseMaintError
32 import daklib.lintian as lintian
33 import daklib.utils as utils
38 from apt_pkg import version_compare
def check_fields_for_valid_utf8(filename, control):
    """Check all fields of a control file for valid UTF-8

    @type  filename: str
    @param filename: name of the control file (only used in the reject message)

    @type  control: dict-like mapping of field name to value
    @param control: control file fields to validate

    @raise Reject: a field contains invalid UTF-8
    """
    for field in control.keys():
        try:
            # Python 2: field values are byte strings; decoding validates them.
            control[field].decode('utf-8')
        except UnicodeDecodeError:
            raise Reject('{0}: The {1} field is not valid UTF-8'.format(filename, field))
class Reject(Exception):
    """exception raised by failing checks"""
    pass


class RejectExternalFilesMismatch(Reject):
    """exception raised by failing the external hashes check"""

    def __str__(self):
        # args are (filename, hash name, our value, external value)
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % self.args[:4]


class RejectACL(Reject):
    """exception raised by failing ACL checks"""

    def __init__(self, acl, reason):
        # acl: the L{daklib.dbconn.ACL} that rejected the upload
        # reason: human-readable rejection reason
        self.acl = acl
        self.reason = reason

    def __str__(self):
        return "ACL {0}: {1}".format(self.acl.name, self.reason)
76 """base class for checks
78 checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
79 raise a L{daklib.checks.Reject} exception including a human-readable
80 description why the upload should be rejected.
82 def check(self, upload):
85 @type upload: L{daklib.archive.ArchiveUpload}
86 @param upload: upload to check
88 @raise daklib.checks.Reject: upload should be rejected
91 def per_suite_check(self, upload, suite):
92 """do per-suite checks
94 @type upload: L{daklib.archive.ArchiveUpload}
95 @param upload: upload to check
97 @type suite: L{daklib.dbconn.Suite}
98 @param suite: suite to check
100 @raise daklib.checks.Reject: upload should be rejected
105 """allow to force ignore failing test
107 C{True} if it is acceptable to force ignoring a failing test,
class SignatureAndHashesCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check_replay(self, upload):
        """reject if this exact signed .changes was already processed

        @raise daklib.checks.Reject: signature was seen before
        """
        # Use private session as we want to remember having seen the .changes
        # in all cases.
        session = upload.session
        history = SignatureHistory.from_signed_file(upload.changes)
        r = history.query(session)
        if r is not None:
            raise Reject('Signature for changes file was already seen at {0}.\nPlease refresh the signature of the changes file if you want to upload it again.'.format(r.seen))
        return True

    def check(self, upload):
        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        self.check_replay(upload)
        self._check_hashes(upload, changes.filename, changes.files.itervalues())

        # Accessing .source parses the .dsc; any parse error rejects the upload.
        try:
            source = changes.source
        except Exception as e:
            raise Reject("Invalid dsc file: {0}".format(e))
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
            self._check_hashes(upload, source.filename, source.files.itervalues())

        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

    def _check_hashes(self, upload, filename, files):
        """Make sure hashes match existing files

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload we are processing

        @type  filename: str
        @param filename: name of the file the expected hash values are taken from

        @type  files: sequence of L{daklib.upload.HashedFile}
        @param files: files to check the hashes for
        """
        try:
            for f in files:
                f.check(upload.directory)
        except daklib.upload.FileDoesNotExist as e:
            raise Reject('{0}: {1}\n'
                         'Perhaps you need to include the file in your upload?'
                         .format(filename, unicode(e)))
        except daklib.upload.UploadException as e:
            raise Reject('{0}: {1}'.format(filename, unicode(e)))
class SignatureTimestampCheck(Check):
    """Check timestamp of .changes signature"""
    def check(self, upload):
        changes = upload.changes

        now = datetime.datetime.utcnow()
        timestamp = changes.signature_timestamp
        age = now - timestamp

        # Reject signatures older than a year or more than a week in the future.
        age_max = datetime.timedelta(days=365)
        age_min = datetime.timedelta(days=-7)

        if age > age_max:
            raise Reject('{0}: Signature from {1} is too old (maximum age is {2} days)'.format(changes.filename, timestamp, age_max.days))
        if age < age_min:
            raise Reject('{0}: Signature from {1} is too far in the future (tolerance is {2} days)'.format(changes.filename, timestamp, abs(age_min.days)))

        return True
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # Source and Version must be well-formed and agree with the filename.
        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # Architecture list and actual upload contents must agree.
        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        # NOTE(review): previously the filename was %-interpolated straight
        # into the SQL string; use a bound parameter so crafted filenames
        # cannot inject SQL.
        q = session.execute(
            "SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern",
            {'pattern': '%/' + f.filename})
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        if ext_size is None:
            # file is not known to the external database; nothing to compare
            return

        if ext_size != f.size:
            raise RejectExternalFilesMismatch(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectExternalFilesMismatch(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectExternalFilesMismatch(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        cnf = Config()

        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        debug_deb_name_postfix = "-dbgsym"
        debug_deb_section = "debug"
        # XXX: Handle dynamic debug section name here

        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        binaries = {binary.control['Package']: binary
                    for binary in upload.changes.binaries}

        for name, binary in binaries.items():
            if binary.control['Section'] == debug_deb_section:
                # If we have a Binary package in the Debug section, we
                # can allow it to not be present in the Binary field
                # in the .changes file, so long as its name (without
                # -dbgsym) is present in the Binary list.
                if not name.endswith(debug_deb_name_postfix):
                    raise Reject('Package {0} is in the Debug section, but '
                                 'does not end in -dbgsym.'.format(name))

                # Right, so, it's named properly, let's check that
                # the corresponding package is in the Binary list
                origin_package_name = name[:-len(debug_deb_name_postfix)]
                if origin_package_name not in upload.changes.binary_names:
                    raise Reject(
                        "Debug package {debug}'s corresponding binary package "
                        "{origin} is not present in the Binary field.".format(
                            debug=name, origin=origin_package_name))

                # So, now we're sure the package is named correctly, and
                # we have the other package. Lets let this slide through.
                continue

            if name not in upload.changes.binary_names:
                # Someone was a nasty little hacker and put a package
                # into the .changes that isn't in debian/control. Bad,
                # bad person.
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(name))

        return True

    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description', 'Section'):
            if field not in control:
                # NOTE(review): message previously formatted {0} twice,
                # printing the filename instead of the missing field name.
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        # "Multi-Arch: no" breaks wanna-build, #768353
        multi_arch = control.get("Multi-Arch")
        if multi_arch == 'no':
            raise Reject('{0}: Multi-Arch: no support in Debian is broken (#768353)'.format(fn))
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        class TarTime(object):
            # collects tar members whose mtime lies outside the allowed window
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += " {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        # In case we have an .orig.tar.*, we have to strip the Debian revison
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            upstream_match = re_field_version_upstream.match(version)
            if not upstream_match:
                # NOTE(review): second placeholder previously repeated {0};
                # the version is passed as the second format argument.
                raise Reject('{0}: Source package includes upstream tarball, but {1} has no Debian revision.'.format(filename, version))
            version = upstream_match.group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        if upload.changes.source is None:
            # Nothing to check for binary-only uploads.
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        check_fields_for_valid_utf8(dsc_fn, control)

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        distributions = upload.changes.distributions
        if len(distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary.package, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """check upload against a single ACL

        @return: C{(result, reason)} where result is C{True} (accepted),
                 C{False} (rejected, with reason) or C{None} (ACL does not
                 apply to this upload)
        """
        source_name = upload.changes.source_name

        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if result == False:
            raise RejectACL(acl, reason)

        # Global ACLs apply to every upload regardless of keyring.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        acls = suite.acls
        if len(acls) != 0:
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result == False:
                    raise RejectACL(acl, reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}). This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance. You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected,t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """load the transition file (Dinstall::ReleaseTransitions)

        @return: parsed transitions mapping, or C{None} if unset/missing/broken
        """
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        # NOTE(review): previously used file(path, 'r').read() which leaked
        # the file handle; a context manager closes it deterministically.
        with open(path, 'r') as fh:
            contents = fh.read()
        try:
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.

    Source-only uploads are only accepted for source packages having a
    Package-List field that also lists architectures per package. This
    check can be disabled via
    Dinstall::AllowSourceOnlyUploadsWithoutPackageList.

    Source-only uploads to NEW are only allowed if
    Dinstall::AllowSourceOnlyNew is set.

    Uploads not including architecture-independent packages are only
    allowed if Dinstall::AllowNoArchIndepUploads is set.
    """
    # NOTE(review): the text above was previously a stray string expression
    # between methods; as a class docstring it is actually attached to the
    # class.

    def is_source_only_upload(self, upload):
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            return True
        return False

    def check(self, upload):
        if not self.is_source_only_upload(upload):
            return True

        allow_source_only_uploads = Config().find_b('Dinstall::AllowSourceOnlyUploads')
        allow_source_only_uploads_without_package_list = Config().find_b('Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
        allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
        allow_no_arch_indep_uploads = Config().find_b('Dinstall::AllowNoArchIndepUploads')
        changes = upload.changes

        if not allow_source_only_uploads:
            raise Reject('Source-only uploads are not allowed.')
        if not allow_source_only_uploads_without_package_list \
           and changes.source.package_list.fallback:
            raise Reject('Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.')
        if not allow_source_only_new and upload.new:
            raise Reject('Source-only uploads to NEW are not allowed.')

        if not allow_no_arch_indep_uploads \
           and 'all' not in changes.architectures \
           and 'experimental' not in changes.distributions \
           and changes.source.package_list.has_arch_indep_packages():
            raise Reject('Uploads not including architecture-independent packages are not allowed.')

        return True
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # Write the tags we care about to a temporary file for lintian.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            for tag in tags:
                print >>temptagfile, tag
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)

        cmd = []
        result = 0

        # Optionally drop privileges when running lintian.
        user = cnf.get('Dinstall::UnprivUser') or None
        if user is not None:
            cmd.extend(['sudo', '-H', '-u', user])

        cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
        try:
            output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            result = e.returncode
            output = e.output
        finally:
            os.unlink(temp_filename)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        if source is None:
            # binary-only upload: nothing to check
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
class SuiteArchitectureCheck(Check):
    """Check every uploaded architecture is enabled in the target suite."""
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
            if query.first() is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        # highest version of source_name present in suite, or None
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        # highest version of binary_name in suite for this arch (or 'all'), or None
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        return db_binary.version

    def _version_checks(self, upload, suite, other_suite, op, op_name):
        # op(version_compare(uploaded, existing)) must be true for the
        # upload to be accepted; op_name is used in the reject message.
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, other_suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the source package {0}, version {1},\n"
                             "however {3} already has version {2}.\n"
                             "Uploads to {5} must have a {4} version than present in {3}."
                             .format(source_name, source_version, v, other_suite.suite_name, op_name, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, other_suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject("Version check failed:\n"
                             "Your upload included the binary package {0}, version {1}, for {2},\n"
                             "however {4} already has version {3}.\n"
                             "Uploads to {6} must have a {5} version than present in {4}."
                             .format(binary_name, binary_version, architecture, v, other_suite.suite_name, op_name, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, suite, s, lambda result: result > 0, 'higher')

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]

        for s in must_be_older_than:
            self._version_checks(upload, suite, s, lambda result: result < 0, 'lower')

        return True