# Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
#
# Parts based on code that is
# Copyright (C) 2001-2006, James Troup <james@nocrew.org>
# Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

21 """module provided pre-acceptance tests
22
23 Please read the documentation for the L{Check} class for the interface.
24 """

from daklib.config import Config
from daklib.dbconn import *
import daklib.dbconn as dbconn
from daklib.regexes import *
from daklib.textutils import fix_maintainer, ParseMaintError
import daklib.lintian as lintian
import daklib.utils as utils

import apt_inst
import apt_pkg
from apt_pkg import version_compare
import os
import textwrap
import time
import yaml

# TODO: replace by subprocess
import commands

class Reject(Exception):
    """exception raised by failing checks"""
    pass

class Check(object):
    """base class for checks

    checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
    raise a L{daklib.checks.Reject} exception including a human-readable
    description of why the upload should be rejected.
    """
    def check(self, upload):
        """do checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError
    def per_suite_check(self, upload, suite):
        """do per-suite checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError
    @property
    def forcable(self):
        """whether a failing check may be forcibly ignored

        C{True} if it is acceptable to force ignoring a failing test,
        C{False} otherwise
        """
        return False

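# A purely illustrative (hypothetical) subclass, showing the interface
# described in the Check docstring above; it is not part of dak itself:
#
#     class ExampleCheck(Check):
#         """reject uploads that include no files at all (example only)"""
#         def check(self, upload):
#             if len(upload.changes.files) == 0:
#                 raise Reject('upload includes no files')
#             return True
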
class SignatureCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check(self, upload):
        changes = upload.changes
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")
        if changes.source is not None:
            if not changes.source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if changes.source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")
        if upload.fingerprint is None or upload.fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")

class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: Missing mandatory field {1}'.format(fn, field))

        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True

class HashesCheck(Check):
    """Check hashes in .changes and .dsc are valid."""
    def check(self, upload):
        changes = upload.changes
        for f in changes.files.itervalues():
            f.check(upload.directory)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                f.check(upload.directory)

class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                except:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1984'), '%Y'))

        class TarTime(object):
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += "  {0} ({1})\n".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))

class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        # In case we have an .orig.tar.*, we have to strip the Debian revision
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            version = re_field_version_upstream.match(version).group('upstream')
        version_match = re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        if upload.changes.source is None:
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True

class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        if len(upload.changes.distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")

class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
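        # Return value convention (as interpreted by check() and
        # per_suite_check() below): (None, None) means this ACL does not
        # apply to the upload's fingerprint or keyring, (False, reason)
        # means the ACL denies the upload, and (True, None) means it
        # allows it.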
        source_name = upload.changes.source_name

        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            # XXX: Drop DMUA part here and switch to new implementation.
            # XXX: Send warning mail once users can set the new DMUA flag
            dmua_status, dmua_reason = self._check_dmua(upload)
            if not dmua_status:
                return False, dmua_reason
            #if acl_per_source is None:
            #    return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def _check_dmua(self, upload):
        # This code is not very nice, but hopefully works until we can replace
        # DM-Upload-Allowed, cf. https://lists.debian.org/debian-project/2012/06/msg00029.html
        session = upload.session

        # Check DM-Upload-Allowed
        suites = upload.final_suites
        assert len(suites) == 1
        suite = list(suites)[0]

        last_suites = ['unstable', 'experimental']
        if suite.suite_name.endswith('-backports'):
            last_suites = [suite.suite_name]
        last = session.query(DBSource).filter_by(source=upload.changes.changes['Source']) \
            .join(DBSource.suites).filter(Suite.suite_name.in_(last_suites)) \
            .order_by(DBSource.version.desc()).limit(1).first()
        if last is None:
            return False, 'No existing source found in {0}'.format(' or '.join(last_suites))
        if not last.dm_upload_allowed:
            return False, 'DM-Upload-Allowed is not set in {0}={1}'.format(last.source, last.version)

        # check current Changed-by is in last Maintainer or Uploaders
        uploader_names = [ u.name for u in last.uploaders ]
        changed_by_field = upload.changes.changes.get('Changed-By', upload.changes.changes['Maintainer'])
        if changed_by_field not in uploader_names:
            return False, '{0} is not an uploader for {1}={2}'.format(changed_by_field, last.source, last.version)

        # check Changed-by is the DM
        changed_by = fix_maintainer(changed_by_field)
        uid = upload.fingerprint.uid
        if uid is None:
            return False, 'Unknown uid for fingerprint {0}'.format(upload.fingerprint.fingerprint)
        if uid.uid != changed_by[3] and uid.name != changed_by[2]:
            return False, 'DMs are not allowed to sponsor uploads (expected {0} <{1}> as maintainer, but got {2})'.format(uid.name, uid.uid, changed_by_field)

        return True, None

    def check(self, upload):
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if not result:
            raise Reject(reason)

        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise Reject(reason)

        return True

    def per_suite_check(self, upload, suite):
        acls = suite.acls
        if len(acls) != 0:
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result == False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True

class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        session = upload.session
        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, let's reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}).  This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance.  You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        contents = file(path, 'r').read()
        try:
            transitions = yaml.load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None

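# Note: get_transitions() above expects the Dinstall::ReleaseTransitions file
# to be a YAML mapping from a transition name to the fields read in check().
# A purely illustrative (hypothetical) entry could look like:
#
#     libexample-transition:
#         source: libexample
#         new: 1.2-1
#         rm: Some Release Team Member
#         reason: "libexample soname bump"
#         packages:
#             - package-using-libexample
#
# The authoritative format is defined by the Release Team's transitions file,
# not by this module.
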
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.
    """
    def check(self, upload):
        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
            return True
        changes = upload.changes
        if changes.source is not None and len(changes.binaries) == 0:
            raise Reject('Source-only uploads are not allowed.')
        return True

class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        fd, temp_filename = utils.temp_filename()
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            for tag in tags:
                print >>temptagfile, tag
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)
        try:
            # FIXME: no shell
            cmd = "lintian --show-overrides --tags-from-file {0} {1}".format(temp_filename, changespath)
            result, output = commands.getstatusoutput(cmd)
        finally:
            os.unlink(temp_filename)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True

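# Note: the Dinstall::LintianTags file loaded by LintianCheck above is YAML
# with a top-level 'lintian' key mapping tag categories to lists of tags.
# A purely illustrative (hypothetical) example:
#
#     lintian:
#         fatal:
#             - some-fatal-lintian-tag
#         nonfatal:
#             - some-overridable-lintian-tag
#
# The real category names and tag lists live in dak's configuration; only the
# 'lintian' key and the lists-of-tags shape are assumed by the code above.
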
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        session = upload.session
        if source is None:
            return True

        source_format = source.dsc['Format']
        query = session.query(SrcFormat).filter_by(format_name=source_format).filter(SrcFormat.suites.contains(suite))
        if query.first() is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))

class SuiteArchitectureCheck(Check):
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
            if query.first() is None:
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True

class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        else:
            return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        else:
            return db_binary.version

    def _version_checks(self, upload, suite, op):
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject('Version check failed (source={0}, version={1}, other-version={2}, suite={3})'.format(source_name, source_version, v, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject('Version check failed (binary={0}, version={1}, other-version={2}, suite={3})'.format(binary_name, binary_version, v, suite.suite_name))

    def per_suite_check(self, upload, suite):
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, s, lambda result: result > 0)

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]

        for s in must_be_older_than:
            self._version_checks(upload, s, lambda result: result < 0)

        return True

    @property
    def forcable(self):
        return True