# daklib/checks.py
1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
2 #
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
6 #
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
11 #
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 # GNU General Public License for more details.
16 #
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
"""module provides pre-acceptance tests
22
23 Please read the documentation for the L{Check} class for the interface.
24 """
25
26 from daklib.config import Config
27 from daklib.dbconn import *
28 import daklib.dbconn as dbconn
29 from daklib.regexes import *
30 from daklib.textutils import fix_maintainer, ParseMaintError
31 import daklib.lintian as lintian
32 import daklib.utils as utils
33 from daklib.upload import InvalidHashException
34
import apt_inst
import apt_pkg
from apt_pkg import version_compare
import errno
import os
import textwrap
import time
import yaml

# TODO: replace by subprocess
import commands
45
class Reject(Exception):
    """Exception raised by a failing check to reject an upload."""
49
class RejectStupidMaintainerException(Exception):
    """exception raised by failing the external hashes check

    Expects four positional args: filename, hash name, current value,
    external value.
    """

    def __str__(self):
        filename, hash_name, current, external = self.args[:4]
        return "'{0}' has mismatching {1} from the external files db ('{2}' [current] vs '{3}' [external])".format(filename, hash_name, current, external)
55
class RejectACL(Reject):
    """exception raised by failing ACL checks"""

    def __init__(self, acl, reason):
        self.acl = acl
        self.reason = reason

    def __str__(self):
        return "ACL {name}: {reason}".format(name=self.acl.name, reason=self.reason)
64
class Check(object):
    """base class for checks

    checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
    raise a L{daklib.checks.Reject} exception including a human-readable
    description why the upload should be rejected.
    """
    def check(self, upload):
        """do checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        # BUGFIX: this used to be "raise NotImplemented", which is not an
        # exception class (it raises a TypeError instead of signalling an
        # abstract method).
        raise NotImplementedError
    def per_suite_check(self, upload, suite):
        """do per-suite checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        # BUGFIX: same as check() above.
        raise NotImplementedError
    @property
    def forcable(self):
        """allow to force ignore failing test

        C{True} if it is acceptable to force ignoring a failing test,
        C{False} otherwise
        """
        # NOTE: the historical spelling "forcable" is kept; renaming would
        # break callers.
        return False
101
class SignatureCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check(self, upload):
        changes = upload.changes
        source = changes.source

        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
        # The signing key must map to a known user.
        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")
118
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        # All of these fields are required.
        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        source_match = re_field_source.match(control['Source'])
        if source_match is None:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if version_match is None:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        # The filename encodes package and version; both must agree with the
        # corresponding control fields.
        fn_match = re_file_changes.match(fn)
        if fn_match is None:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if fn_match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if fn_match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # Architecture "source" and an included source package must come in
        # pairs.
        has_source_arch = 'source' in changes.architectures
        if has_source_arch and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and not has_source_arch:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        changed_by = changes.changes.get('Changed-By')
        if changed_by is not None:
            try:
                fix_maintainer(changed_by)
            except ParseMaintError as e:
                raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
175
class HashesCheck(Check):
    """Check hashes in .changes and .dsc are valid."""
    def check(self, upload):
        # `what` names the file whose listed hashes we are currently
        # verifying, for use in error messages.
        what = None
        try:
            changes = upload.changes
            what = changes.filename
            self._check_files(upload, changes.files)
            source = changes.source
            if source is not None:
                what = source.filename
                self._check_files(upload, source.files)
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            raise Reject('{0} refers to non-existing file: {1}\n'
                         'Perhaps you need to include it in your upload?'
                         .format(what, os.path.basename(e.filename)))
        except InvalidHashException as e:
            raise Reject('{0}: {1}'.format(what, unicode(e)))

    def _check_files(self, upload, files):
        # Verify size and checksums of every referenced file.
        for f in files.itervalues():
            f.check(upload.directory)
198
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        """Compare one uploaded file against the external_files table.

        @param session: database session
        @param f: uploaded file (has filename, size and checksum attributes)

        @raise RejectStupidMaintainerException: file differs from the
               external copy
        """
        # Use a bound parameter instead of %-interpolating the filename into
        # the SQL string: filenames come from the upload and could otherwise
        # break the statement or inject SQL.
        # NOTE(review): LIKE wildcards (% and _) inside the filename are
        # still interpreted, as in the original query.
        q = session.execute(
            "SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern",
            {'pattern': '%/{0}'.format(f.filename)})
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        # No external copy known: nothing to compare against.
        if not ext_size:
            return

        if ext_size != f.size:
            raise RejectStupidMaintainerException(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectStupidMaintainerException(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectStupidMaintainerException(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectStupidMaintainerException(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        cnf = Config()

        # The external files check is optional.
        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
235
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # Every built binary package must also be listed in the Binary field
        # of the .changes file.
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        """Check a single binary package.

        @raise daklib.checks.Reject: package is malformed
        """
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # BUGFIX: the second placeholder was {0}, printing the
                # filename instead of the missing field name.
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                # Narrowed from a bare "except:", which would also swallow
                # KeyboardInterrupt/SystemExit.
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
311
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1984'), '%Y'))

        class TarTime(object):
            """Collects tar members whose mtime is outside the allowed range."""
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                # BUGFIX: these assignments used bare names
                # (future_files/past_files), which raised NameError the
                # first time an out-of-range timestamp was seen.
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                # BUGFIX: add a newline so multiple offending files are not
                # run together on one line.
                reason += "  {0} ({1})\n".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            # Only the control tarball is inspected here.
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
349
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        """Check that ``filename`` matches ``control``'s Source and Version."""
        # An .orig.tar.* carries only the upstream version, so the Debian
        # revision has to be stripped before comparing. Try that pattern
        # first.
        match = re_file_orig.match(filename)
        is_orig = match is not None
        if not is_orig:
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            version = re_field_version_upstream.match(version).group('upstream')
        version_without_epoch = re_field_version.match(version).group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        if upload.changes.source is None:
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        # Fields must be well-formed and agree with the .changes file.
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        for field in ('Source', 'Version'):
            if control[field] != changes[field]:
                raise Reject('{0}: {1} field does not match {1} field in changes'.format(dsc_fn, field))

        # The .dsc itself and every file it references must have a
        # consistent filename.
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # Build dependency fields must parse.
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is None:
                continue
            if value.strip() == '':
                raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
            try:
                apt_pkg.parse_src_depends(value)
            except Exception as e:
                raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if rejects:
            raise Reject("\n".join(rejects))

        return True
412
class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        distributions = upload.changes.distributions
        if len(distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
418
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                # A binary with the same name but a different source package
                # already in the suite counts as a hijack.
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """Evaluate a single ACL against the upload.

        Returns a tri-state pair:
          (None, None)    -- ACL does not apply to this upload
          (False, reason) -- ACL forbids the upload
          (True, None)    -- ACL allows the upload
        """
        source_name = upload.changes.source_name

        # Matching conditions: if either fails, the ACL simply does not
        # apply (None), which is different from rejecting (False).
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                # 'source' is not a binary architecture; ignore it when
                # comparing against the ACL's allowed architectures.
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        # Per-source entries can either whitelist (allow_per_source) or
        # blacklist (deny_per_source) individual source packages for this
        # fingerprint.
        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            if acl_per_source is None:
                return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        # A fingerprint-specific ACL overrides the keyring's default ACL.
        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if not result:
            raise RejectACL(acl, reason)

        # Global ACLs can only veto (== False); a non-matching global ACL
        # (None) is ignored.
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise RejectACL(acl, reason)

        return True

    def per_suite_check(self, upload, suite):
        acls = suite.acls
        if len(acls) != 0:
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                # Any explicit denial rejects; otherwise at least one ACL
                # must explicitly accept (True, not just None).
                if result == False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
516
class TransitionCheck(Check):
    """check for a transition

    Rejects sourceful uploads of packages that are part of an ongoing
    testing transition (as listed in Dinstall::ReleaseTransitions).
    """
    def check(self, upload):
        # Only sourceful uploads can be part of a transition.
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        # BUGFIX: `session` was referenced below without ever being
        # assigned; take it from the upload.
        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            # BUGFIX: this used to be assigned to `source`, clobbering the
            # uploaded package name and making the membership test below
            # compare the transition against itself.
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}).  This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance.  You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """Load the transition table.

        @rtype:  dict or C{None}
        @return: parsed transitions, or C{None} if unconfigured, missing or
                 broken
        """
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        # open() instead of the py2-only file() builtin, and close the
        # handle deterministically.
        with open(path, 'r') as fd:
            contents = fd.read()
        try:
            # NOTE(review): yaml.load can instantiate arbitrary objects;
            # the file is operator-controlled, but yaml.safe_load would be
            # safer — confirm before changing.
            transitions = yaml.load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
583
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.
    """
    def check(self, upload):
        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
            return True
        changes = upload.changes
        is_source_only = changes.source is not None and len(changes.binaries) == 0
        if is_source_only:
            raise Reject('Source-only uploads are not allowed.')
        return True
597
class LintianCheck(Check):
    """Check package using lintian

    Runs lintian with the tags listed in Dinstall::LintianTags and rejects
    the upload if any of those tags fire without an override.
    """
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            # NOTE(review): yaml.load on an operator-controlled file;
            # yaml.safe_load would be safer — confirm before changing.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # Write the tags of interest to a temp file that is passed to
        # lintian via --tags-from-file. 0o644 so an unprivileged lintian
        # (see below) can read it.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            for tag in tags:
                print >>temptagfile, tag
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)
        try:
            # Optionally drop privileges for running lintian.
            # NOTE(review): the command is built by string interpolation and
            # run through a shell (commands.getstatusoutput); paths with
            # shell metacharacters would break this — verify inputs or move
            # to subprocess with an argument list when porting.
            if cnf.unprivgroup:
                cmd = "sudo -H -u {0} -- /usr/bin/lintian --show-overrides --tags-from-file {1} {2}".format(cnf.unprivgroup, temp_filename, changespath)
            else:
                cmd = "/usr/bin/lintian --show-overrides --tags-from-file {0} {1}".format(temp_filename, changespath)
            result, output = commands.getstatusoutput(cmd)
        finally:
            os.unlink(temp_filename)

        # Exit code 2 means lintian itself failed; warn but still try to
        # parse whatever output was produced.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
651
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        if source is None:
            return True

        session = upload.session
        source_format = source.dsc['Format']
        allowed = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite)).first()
        if allowed is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
664
class SuiteArchitectureCheck(Check):
    """Check all uploaded architectures are enabled in the target suite."""
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
            if query.first() is None:
                # BUGFIX: the message used placeholder {2} with only two
                # format arguments, raising IndexError instead of Reject.
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True
674
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        """Return the highest version of ``source_name`` in ``suite``, or None."""
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        return None if db_source is None else db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        """Return the highest version of ``binary_name`` (arch or 'all') in
        ``suite``, or None."""
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        return None if db_binary is None else db_binary.version

    def _version_checks(self, upload, suite, op):
        """Reject unless op(version_compare(uploaded, existing)) holds for
        every uploaded source and binary that already exists in ``suite``."""
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            other = self._highest_source_version(session, source_name, suite)
            if other is not None and not op(version_compare(source_version, other)):
                raise Reject('Version check failed (source={0}, version={1}, other-version={2}, suite={3})'.format(source_name, source_version, other, suite.suite_name))

        for binary in upload.changes.binaries:
            control = binary.control
            binary_name = control['Package']
            binary_version = control['Version']
            architecture = control['Architecture']
            other = self._highest_binary_version(session, binary_name, suite, architecture)
            if other is not None and not op(version_compare(binary_version, other)):
                raise Reject('Version check failed (binary={0}, version={1}, other-version={2}, suite={3})'.format(binary_name, binary_version, other, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        # The upload must be newer than what is in `suite` itself and in any
        # suite referenced by a MustBeNewerThan/Enhances version check.
        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        newer_refs = [vc.reference for vc in vc_newer]
        newer_refs.append(suite)
        for reference in newer_refs:
            self._version_checks(upload, reference, lambda result: result > 0)

        # ...and older than anything in suites referenced by MustBeOlderThan.
        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        for reference in [vc.reference for vc in vc_older]:
            self._version_checks(upload, reference, lambda result: result < 0)

        return True

    @property
    def forcable(self):
        # Version constraint failures may be overridden by an operator.
        return True