]> git.decadent.org.uk Git - dak.git/blob - daklib/checks.py
checks: Fix a syntax error in ExternalHashCheck
[dak.git] / daklib / checks.py
1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
2 #
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
6 #
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
11 #
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 # GNU General Public License for more details.
16 #
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21 """module provided pre-acceptance tests
22
23 Please read the documentation for the L{Check} class for the interface.
24 """
25
from daklib.config import Config
from daklib.dbconn import *
import daklib.dbconn as dbconn
from daklib.regexes import *
from daklib.textutils import fix_maintainer, ParseMaintError
import daklib.lintian as lintian
import daklib.utils as utils

import apt_inst
import apt_pkg
from apt_pkg import version_compare
import os
import textwrap
import time
import yaml

# TODO: replace by subprocess
import commands
43
class Reject(Exception):
    """Raised by a failing check; the message explains why the upload is rejected."""
47
class RejectStupidMaintainerException(Exception):
    """Raised when a file's size or checksums disagree with the external files db."""

    def __str__(self):
        # args carry (filename, hash name, current value, external value)
        filename, hash_name, current, external = self.args[:4]
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % (filename, hash_name, current, external)
53
class Check(object):
    """base class for checks

    checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
    raise a L{daklib.checks.Reject} exception including a human-readable
    description why the upload should be rejected.
    """
    def check(self, upload):
        """do checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        # `raise NotImplemented` raised a TypeError (NotImplemented is a
        # singleton value, not an exception class); NotImplementedError is
        # the intended signal for an abstract method.
        raise NotImplementedError
    def per_suite_check(self, upload, suite):
        """do per-suite checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError
    @property
    def forcable(self):
        """allow to force ignore failing test

        C{True} if it is acceptable to force ignoring a failing test,
        C{False} otherwise
        """
        return False
90
class SignatureCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check(self, upload):
        changes = upload.changes

        # the .changes itself must carry a valid signature
        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")

        # for sourceful uploads, the .dsc must be validly signed by the
        # same key as the .changes
        source = changes.source
        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")

        # the signing key must map to a known uid
        fingerprint = upload.fingerprint
        if fingerprint is None or fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")
107
class ChangesCheck(Check):
    """Check changes file for syntax errors."""

    # fields every .changes file has to provide
    MANDATORY_FIELDS = ('Distribution', 'Source', 'Binary', 'Architecture',
                        'Version', 'Maintainer', 'Files', 'Changes', 'Description')

    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in self.MANDATORY_FIELDS:
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        # Source and Version must be well-formed
        source_match = re_field_source.match(control['Source'])
        if source_match is None:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if version_match is None:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        # the filename must agree with the Source and Version fields
        fn_match = re_file_changes.match(fn)
        if fn_match is None:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if fn_match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if fn_match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for name in changes.binary_names:
            if not re_field_package.match(name):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, name))

        # the Architecture field and the presence of a .dsc must agree
        has_source_arch = 'source' in changes.architectures
        if has_source_arch and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and not has_source_arch:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(control['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        changed_by = control.get('Changed-By')
        if changed_by is not None:
            try:
                fix_maintainer(changed_by)
            except ParseMaintError as e:
                raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if not changes.files:
            raise Reject("Changes includes no files.")

        # Closes entries must be bug numbers
        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
164
class HashesCheck(Check):
    """Check hashes in .changes and .dsc are valid."""
    def check(self, upload):
        """verify size and checksums of every file in the upload

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check
        """
        changes = upload.changes
        for f in changes.files.itervalues():
            f.check(upload.directory)
        # This assignment used to live inside the loop above, leaving
        # `source` undefined (NameError) for a .changes listing no files.
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                f.check(upload.directory)
175
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        """compare size and checksums of C{f} with the external files db

        @raise RejectStupidMaintainerException: a value does not match
        """
        # Bind the filename as a query parameter instead of %-interpolating
        # it into the SQL string: the filename comes from the upload and
        # must not be able to alter the query (SQL injection).
        q = session.execute(
            "SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern",
            {'pattern': '%/{0}'.format(f.filename)})
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        # no external entry known for this filename
        if not ext_size:
            return

        if ext_size != f.size:
            raise RejectStupidMaintainerException(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectStupidMaintainerException(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectStupidMaintainerException(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectStupidMaintainerException(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        """check all files of the upload when use_extfiles is enabled"""
        cnf = Config()

        # the external files db is optional
        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
212
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        """check all binary packages included in the upload

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # every built package must also be listed in the Binary field
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        """check a single binary package

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload the binary belongs to

        @param binary: binary package to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # {1} is the missing field name; this used to format the
                # filename twice ('{0}...{0}').
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename

        match = re_file_binary.match(fn)
        if not match:
            # guard against None before accessing groups below; an unmatched
            # filename used to raise AttributeError instead of Reject
            raise Reject('{0}: Does not match re_file_binary'.format(fn))
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                # narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit are not swallowed
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
288
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        # anything newer than now + grace or older than the cutoff year is bad
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1984'), '%Y'))

        class TarTime(object):
            """collect tar members whose mtime lies outside the allowed window"""
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                # These are instance attributes; the previous code referenced
                # them as bare names, raising NameError as soon as a bad
                # timestamp was actually encountered.
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            # human-readable rejection message listing each offending member
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += "  {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
326
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        # An .orig.tar.* carries no Debian revision in its version part, so
        # try the orig pattern first and remember which pattern matched.
        match = re_file_orig.match(filename)
        is_orig = match is not None
        if not is_orig:
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            version = re_field_version_upstream.match(version).group('upstream')
        version_without_epoch = re_field_version.match(version).group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        source = upload.changes.source
        if source is None:
            return True

        changes = upload.changes.changes
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        for field in ('Source', 'Version'):
            if control[field] != changes[field]:
                raise Reject('{0}: {1} field does not match {1} field in changes'.format(dsc_fn, field))

        # check filenames
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is None:
                continue
            if value.strip() == '':
                raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
            try:
                apt_pkg.parse_src_depends(value)
            except Exception as e:
                raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if rejects:
            raise Reject("\n".join(rejects))

        return True
389
class SingleDistributionCheck(Check):
    """Check that the .changes targets only a single distribution."""
    def check(self, upload):
        distributions = upload.changes.distributions
        if len(distributions) != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
395
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        """check whether the upload takes over a binary that is currently
        built from a different source package in C{suite}

        @return: tuple (hijack?, offending binary or None, other source name or None)
        """
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                # same binary name, but built from a different source: hijack
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """evaluate a single ACL against the upload

        @return: tuple (result, reason): C{(None, None)} when the ACL does
            not apply to this fingerprint, C{(False, reason)} when it forbids
            the upload, C{(True, None)} when it allows it
        """
        source_name = upload.changes.source_name

        # the ACL may be restricted to specific fingerprints or a keyring
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                # 'source' is not a binary architecture; ignore it here
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            # XXX: Drop DMUA part here and switch to new implementation.
            # XXX: Send warning mail once users can set the new DMUA flag
            dmua_status, dmua_reason = self._check_dmua(upload)
            if not dmua_status:
                return False, dmua_reason
            #if acl_per_source is None:
            #    return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def _check_dmua(self, upload):
        """check the old DM-Upload-Allowed rules for this upload

        @return: tuple (allowed, reason): C{(True, None)} when the DM may
            upload, C{(False, reason)} otherwise
        """
        # This code is not very nice, but hopefully works until we can replace
        # DM-Upload-Allowed, cf. https://lists.debian.org/debian-project/2012/06/msg00029.html
        session = upload.session

        # Check DM-Upload-Allowed
        suites = upload.final_suites
        assert len(suites) == 1
        suite = list(suites)[0]

        # DMUA is decided against the latest version in unstable/experimental
        # (or in the backports suite itself for backports uploads)
        last_suites = ['unstable', 'experimental']
        if suite.suite_name.endswith('-backports'):
            last_suites = [suite.suite_name]
        last = session.query(DBSource).filter_by(source=upload.changes.changes['Source']) \
            .join(DBSource.suites).filter(Suite.suite_name.in_(last_suites)) \
            .order_by(DBSource.version.desc()).limit(1).first()
        if last is None:
            return False, 'No existing source found in {0}'.format(' or '.join(last_suites))
        if not last.dm_upload_allowed:
            return False, 'DM-Upload-Allowed is not set in {0}={1}'.format(last.source, last.version)

        # check current Changed-by is in last Maintainer or Uploaders
        uploader_names = [ u.name for u in last.uploaders ]
        changed_by_field = upload.changes.changes.get('Changed-By', upload.changes.changes['Maintainer'])
        if changed_by_field not in uploader_names:
            return False, '{0} is not an uploader for {1}={2}'.format(changed_by_field, last.source, last.version)

        # check Changed-by is the DM
        changed_by = fix_maintainer(changed_by_field)
        uid = upload.fingerprint.uid
        if uid is None:
            return False, 'Unknown uid for fingerprint {0}'.format(upload.fingerprint.fingerprint)
        if uid.uid != changed_by[3] and uid.name != changed_by[2]:
            return False, 'DMs are not allowed to sponsor uploads (expected {0} <{1}> as maintainer, but got {2})'.format(uid.name, uid.uid, changed_by_field)

        return True, None

    def check(self, upload):
        """check the key's own ACL and all global ACLs

        @raise daklib.checks.Reject: upload should be rejected
        """
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        # a fingerprint-specific ACL overrides the keyring's ACL
        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        result, reason = self._check_acl(session, upload, acl)
        if not result:
            raise Reject(reason)

        # global ACLs can still veto the upload
        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise Reject(reason)

        return True

    def per_suite_check(self, upload, suite):
        """check the target suite's ACLs; at least one must accept and
        none may explicitly forbid

        @raise daklib.checks.Reject: upload should be rejected
        """
        acls = suite.acls
        if len(acls) != 0:
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result == False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
535
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        """reject sourceful uploads of packages affected by an ongoing
        testing transition

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        # `session` was previously never defined here, causing a NameError
        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            # the source package whose migration this transition waits for;
            # the old code clobbered `source` with this value, so the
            # "is the uploaded source affected?" test below was broken
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}).  This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance.  You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """load the transitions file configured in Dinstall::ReleaseTransitions

        @rtype: dict or None
        @return: parsed transitions, or None if unavailable or broken
        """
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        # close the file handle instead of leaking it
        with open(path, 'r') as fd:
            contents = fd.read()
        try:
            # safe_load: the transitions file must not be able to construct
            # arbitrary Python objects
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
602
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.
    """
    def check(self, upload):
        # short-circuit when source-only uploads are explicitly permitted
        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
            return True
        changes = upload.changes
        is_source_only = changes.source is not None and not changes.binaries
        if is_source_only:
            raise Reject('Source-only uploads are not allowed.')
        return True
616
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        """run lintian against sourceful uploads to unstable/experimental
        and reject when it emits one of the configured fatal tags

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: lintian reported a fatal tag
        """
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        # the tags lintian should reject on are configured in a YAML file
        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            # NOTE(review): yaml.load can construct arbitrary Python objects;
            # the tag file is operator-controlled, but safe_load would be safer.
            lintiantags = yaml.load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # write the tags of interest to a temporary file for --tags-from-file
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            for tag in tags:
                print >>temptagfile, tag
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)
        try:
            # run lintian as an unprivileged user when configured to do so
            if cnf.unprivgroup:
                cmd = "sudo -H -u {0} -- /usr/bin/lintian --show-overrides --tags-from-file {1} {2}".format(cnf.unprivgroup, temp_filename, changespath)
            else:
                cmd = "/usr/bin/lintian --show-overrides --tags-from-file {0} {1}".format(temp_filename, changespath)
            result, output = commands.getstatusoutput(cmd)
        finally:
            os.unlink(temp_filename)

        # NOTE(review): commands.getstatusoutput returns a raw os.wait-style
        # status, so exit code 2 would appear as 512 here -- confirm whether
        # this comparison ever matches lintian's fatal-error exit.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
670
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        if source is None:
            return True

        # the format from the .dsc must be registered for this suite
        source_format = source.dsc['Format']
        known = upload.session.query(SrcFormat) \
            .filter_by(format_name=source_format) \
            .filter(SrcFormat.suites.contains(suite)) \
            .first()
        if known is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
683
class SuiteArchitectureCheck(Check):
    """Check that all architectures of the upload are valid for the target suite."""
    def per_suite_check(self, upload, suite):
        """check every architecture of the upload against C{suite}

        @raise daklib.checks.Reject: an architecture is not in the suite
        """
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
            if query.first() is None:
                # The placeholder used to be {2}, which raised IndexError
                # while formatting the rejection message.
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True
693
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        """highest version of C{source_name} in C{suite}, or None"""
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        else:
            return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        """highest version of C{binary_name} in C{suite} for C{architecture}
        or 'all', or None"""
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        else:
            return db_binary.version

    def _version_checks(self, upload, suite, op):
        """reject unless C{op(version_compare(uploaded, existing))} holds for
        the uploaded source and every uploaded binary already known in C{suite}

        @raise daklib.checks.Reject: a version constraint is violated
        """
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject('Version check failed (source={0}, version={1}, other-version={2}, suite={3})'.format(source_name, source_version, v, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject('Version check failed (binary={0}, version={1}, other-version={2}, suite={3})'.format(binary_name, binary_version, v, suite.suite_name))

    def per_suite_check(self, upload, suite):
        """enforce MustBeNewerThan/Enhances/MustBeOlderThan version checks
        configured for C{suite}

        @raise daklib.checks.Reject: upload should be rejected
        """
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            self._version_checks(upload, s, lambda result: result > 0)

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]

        for s in must_be_older_than:
            self._version_checks(upload, s, lambda result: result < 0)

        return True

    @property
    def forcable(self):
        # version-check failures may be overridden by ftp-masters
        return True