# daklib/checks.py
1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
2 #
3 # Parts based on code that is
4 # Copyright (C) 2001-2006, James Troup <james@nocrew.org>
5 # Copyright (C) 2009-2010, Joerg Jaspert <joerg@debian.org>
6 #
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
11 #
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 # GNU General Public License for more details.
16 #
17 # You should have received a copy of the GNU General Public License along
18 # with this program; if not, write to the Free Software Foundation, Inc.,
19 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
20
21 """module provided pre-acceptance tests
22
23 Please read the documentation for the L{Check} class for the interface.
24 """
25
from daklib.config import Config
from daklib.dbconn import *
import daklib.dbconn as dbconn
from daklib.regexes import *
from daklib.textutils import fix_maintainer, ParseMaintError
import daklib.lintian as lintian
import daklib.utils as utils
from daklib.upload import InvalidHashException

import apt_inst
import apt_pkg
from apt_pkg import version_compare
import errno
import os
import textwrap
import time
import yaml

# TODO: replace by subprocess
import commands
45
class Reject(Exception):
    """Exception raised by a failing check.

    The message is a human-readable explanation of why the upload
    must be rejected.
    """
49
class RejectStupidMaintainerException(Exception):
    """Exception raised when the external hashes check fails.

    Expects four positional args: (filename, field name, current value,
    external value).
    """

    def __str__(self):
        filename, field, current, external = self.args[:4]
        return "'%s' has mismatching %s from the external files db ('%s' [current] vs '%s' [external])" % (filename, field, current, external)
55
class Check(object):
    """base class for checks

    checks are called by L{daklib.archive.ArchiveUpload}. Failing tests should
    raise a L{daklib.checks.Reject} exception including a human-readable
    description why the upload should be rejected.
    """
    def check(self, upload):
        """do checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        # Bug fix: `raise NotImplemented` raises a TypeError, because
        # NotImplemented is a constant, not an exception class.
        raise NotImplementedError
    def per_suite_check(self, upload, suite):
        """do per-suite checks

        @type  upload: L{daklib.archive.ArchiveUpload}
        @param upload: upload to check

        @type  suite: L{daklib.dbconn.Suite}
        @param suite: suite to check

        @raise daklib.checks.Reject: upload should be rejected
        """
        raise NotImplementedError
    @property
    def forcable(self):
        """allow to force ignore failing test

        C{True} if it is acceptable to force ignoring a failing test,
        C{False} otherwise
        """
        return False
92
class SignatureCheck(Check):
    """Check signature of changes and dsc file (if included in upload)

    Make sure the signature is valid and done by a known user.
    """
    def check(self, upload):
        changes = upload.changes
        source = changes.source

        if not changes.valid_signature:
            raise Reject("Signature for .changes not valid.")

        if source is not None:
            if not source.valid_signature:
                raise Reject("Signature for .dsc not valid.")
            # Both files must come from the same signer.
            if source.primary_fingerprint != changes.primary_fingerprint:
                raise Reject(".changes and .dsc not signed by the same key.")

        fingerprint = upload.fingerprint
        if fingerprint is None or fingerprint.uid is None:
            raise Reject(".changes signed by unknown key.")
109
class ChangesCheck(Check):
    """Check changes file for syntax errors."""
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        # Source and Version must parse; the captured groups are needed
        # below to validate the filename.
        m_source = re_field_source.match(control['Source'])
        if m_source is None:
            raise Reject('{0}: Invalid Source field'.format(fn))
        m_version = re_field_version.match(control['Version'])
        if m_version is None:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = m_version.group('without_epoch')

        # The .changes filename must encode the same package and version.
        m_fn = re_file_changes.match(fn)
        if m_fn is None:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if m_fn.group('package') != m_source.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if m_fn.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if re_field_package.match(bn) is None:
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        # Architecture "source" and an actual .dsc must go together.
        has_source_arch = 'source' in changes.architectures
        if has_source_arch and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and not has_source_arch:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        changed_by = changes.changes.get('Changed-By')
        if changed_by is not None:
            try:
                fix_maintainer(changed_by)
            except ParseMaintError as e:
                raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if not changes.files:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if re_isanum.match(bugnum) is None:
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
166
class HashesCheck(Check):
    """Check hashes in .changes and .dsc are valid."""
    def check(self, upload):
        # `current` names the file whose entries are being checked so the
        # error message can point at the right .changes/.dsc.
        current = None
        try:
            changes = upload.changes
            current = changes.filename
            for hashed_file in changes.files.itervalues():
                hashed_file.check(upload.directory)

            source = changes.source
            if source is not None:
                current = source.filename
                for hashed_file in source.files.itervalues():
                    hashed_file.check(upload.directory)
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise
            raise Reject('{0} refers to non-existing file: {1}\n'
                         'Perhaps you need to include it in your upload?'
                         .format(current, os.path.basename(e.filename)))
        except InvalidHashException as e:
            raise Reject('{0}: {1}'.format(current, unicode(e)))
189
class ExternalHashesCheck(Check):
    """Checks hashes in .changes and .dsc against an external database."""
    def check_single(self, session, f):
        """Compare C{f}'s size and checksums with the external_files table.

        @raise RejectStupidMaintainerException: on the first mismatch
        """
        # Security fix: pass the filename as a bound parameter instead of
        # interpolating it into the SQL string; the name comes from the
        # uploaded .changes/.dsc and must not be spliced into SQL.
        q = session.execute(
            "SELECT size, md5sum, sha1sum, sha256sum FROM external_files WHERE filename LIKE :pattern",
            {'pattern': '%/{0}'.format(f.filename)})
        (ext_size, ext_md5sum, ext_sha1sum, ext_sha256sum) = q.fetchone() or (None, None, None, None)

        # No entry in the external database: nothing to compare against.
        if not ext_size:
            return

        if ext_size != f.size:
            raise RejectStupidMaintainerException(f.filename, 'size', f.size, ext_size)

        if ext_md5sum != f.md5sum:
            raise RejectStupidMaintainerException(f.filename, 'md5sum', f.md5sum, ext_md5sum)

        if ext_sha1sum != f.sha1sum:
            raise RejectStupidMaintainerException(f.filename, 'sha1sum', f.sha1sum, ext_sha1sum)

        if ext_sha256sum != f.sha256sum:
            raise RejectStupidMaintainerException(f.filename, 'sha256sum', f.sha256sum, ext_sha256sum)

    def check(self, upload):
        """Run check_single over every uploaded file when cnf.use_extfiles is set."""
        cnf = Config()

        if not cnf.use_extfiles:
            return

        session = upload.session
        changes = upload.changes

        for f in changes.files.itervalues():
            self.check_single(session, f)
        source = changes.source
        if source is not None:
            for f in source.files.itervalues():
                self.check_single(session, f)
226
class BinaryCheck(Check):
    """Check binary packages for syntax errors."""
    def check(self, upload):
        for binary in upload.changes.binaries:
            self.check_binary(upload, binary)

        # Every binary included in the upload must be listed in the
        # Binary field of the .changes file.
        binary_names = set([ binary.control['Package'] for binary in upload.changes.binaries ])
        for bn in binary_names:
            if bn not in upload.changes.binary_names:
                raise Reject('Package {0} is not mentioned in Binary field in changes'.format(bn))

        return True

    def check_binary(self, upload, binary):
        """Validate a single binary package: mandatory fields, field
        syntax, filename consistency and dependency field syntax.

        @raise Reject: on the first problem found
        """
        fn = binary.hashed_file.filename
        control = binary.control

        for field in ('Package', 'Architecture', 'Version', 'Description'):
            if field not in control:
                # Bug fix: the message used {0} twice and never showed the
                # name of the missing field.
                raise Reject('{0}: Missing mandatory field {1}.'.format(fn, field))

        # check fields

        package = control['Package']
        if not re_field_package.match(package):
            raise Reject('{0}: Invalid Package field'.format(fn))

        version = control['Version']
        version_match = re_field_version.match(version)
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        architecture = control['Architecture']
        if architecture not in upload.changes.architectures:
            raise Reject('{0}: Architecture not in Architecture field in changes file'.format(fn))
        if architecture == 'source':
            raise Reject('{0}: Architecture "source" invalid for binary packages'.format(fn))

        source = control.get('Source')
        if source is not None and not re_field_source.match(source):
            raise Reject('{0}: Invalid Source field'.format(fn))

        # check filename: package, version (without epoch) and architecture
        # must all agree with the control fields

        match = re_file_binary.match(fn)
        if package != match.group('package'):
            raise Reject('{0}: filename does not match Package field'.format(fn))
        if version_without_epoch != match.group('version'):
            raise Reject('{0}: filename does not match Version field'.format(fn))
        if architecture != match.group('architecture'):
            raise Reject('{0}: filename does not match Architecture field'.format(fn))

        # check dependency field syntax

        for field in ('Breaks', 'Conflicts', 'Depends', 'Enhances', 'Pre-Depends',
                      'Provides', 'Recommends', 'Replaces', 'Suggests'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_depends(value)
                # Narrowed from a bare `except:` which would also swallow
                # KeyboardInterrupt/SystemExit.
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))

        for field in ('Built-Using',):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception:
                    raise Reject('{0}: APT could not parse {1} field'.format(fn, field))
302
class BinaryTimestampCheck(Check):
    """check timestamps of files in binary packages

    Files in the near future cause ugly warnings and extreme time travel
    can cause errors on extraction.
    """
    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1984'), '%Y'))

        class TarTime(object):
            """Collects member names whose mtime falls outside the allowed window."""
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                # Bug fix: the original assigned to the bare names
                # `future_files`/`past_files`, which raised NameError
                # instead of recording the offending member.
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            # Build a rejection message listing each offending member.
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += "  {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
340
class SourceCheck(Check):
    """Check source package for syntax errors."""
    def check_filename(self, control, filename, regex):
        """Check that C{filename} matches the Source and Version fields of
        C{control}, using C{regex} to parse non-.orig filenames.

        @raise Reject: if the filename does not parse or does not match
        """
        # In case we have an .orig.tar.*, we have to strip the Debian revision
        # from the version number. So handle this special case first.
        is_orig = True
        match = re_file_orig.match(filename)
        if not match:
            is_orig = False
            match = regex.match(filename)

        if not match:
            raise Reject('{0}: does not match regular expression for source filenames'.format(filename))
        if match.group('package') != control['Source']:
            raise Reject('{0}: filename does not match Source field'.format(filename))

        version = control['Version']
        if is_orig:
            # .orig tarballs carry only the upstream part of the version.
            version = re_field_version_upstream.match(version).group('upstream')
        version_match =  re_field_version.match(version)
        version_without_epoch = version_match.group('without_epoch')
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: filename does not match Version field'.format(filename))

    def check(self, upload):
        """Validate the .dsc: field syntax, agreement with the .changes,
        filename consistency, and dependency field syntax.

        @raise Reject: on the first problem found
        """
        # Binary-only uploads carry no source to check.
        if upload.changes.source is None:
            return True

        changes = upload.changes.changes
        source = upload.changes.source
        control = source.dsc
        dsc_fn = source._dsc_file.filename

        # check fields
        if not re_field_package.match(control['Source']):
            raise Reject('{0}: Invalid Source field'.format(dsc_fn))
        if control['Source'] != changes['Source']:
            raise Reject('{0}: Source field does not match Source field in changes'.format(dsc_fn))
        if control['Version'] != changes['Version']:
            raise Reject('{0}: Version field does not match Version field in changes'.format(dsc_fn))

        # check filenames: the .dsc itself and every file it references
        self.check_filename(control, dsc_fn, re_file_dsc)
        for f in source.files.itervalues():
            self.check_filename(control, f.filename, re_file_source)

        # check dependency field syntax
        for field in ('Build-Conflicts', 'Build-Conflicts-Indep', 'Build-Depends', 'Build-Depends-Arch', 'Build-Depends-Indep'):
            value = control.get(field)
            if value is not None:
                if value.strip() == '':
                    raise Reject('{0}: empty {1} field'.format(dsc_fn, field))
                try:
                    apt_pkg.parse_src_depends(value)
                except Exception as e:
                    raise Reject('{0}: APT could not parse {1} field: {2}'.format(dsc_fn, field, e))

        # Delegate the file-list sanity checks (duplicates, formats, ...)
        # to the shared helper; it returns a list of reject messages.
        rejects = utils.check_dsc_files(dsc_fn, control, source.files.keys())
        if len(rejects) > 0:
            raise Reject("\n".join(rejects))

        return True
403
class SingleDistributionCheck(Check):
    """Ensure the .changes file targets exactly one distribution."""
    def check(self, upload):
        # Multi-distribution (or zero-distribution) uploads are rejected.
        distribution_count = len(upload.changes.distributions)
        if distribution_count != 1:
            raise Reject("Only uploads to a single distribution are allowed.")
409
class ACLCheck(Check):
    """Check the uploader is allowed to upload the packages in .changes"""

    def _does_hijack(self, session, upload, suite):
        """Check whether the upload would take over a binary name that is
        currently built from a different source package in C{suite}.

        @return: C{(True, binary, other_source_name)} on a hijack,
                 C{(False, None, None)} otherwise
        """
        # Try to catch hijacks.
        # This doesn't work correctly. Uploads to experimental can still
        # "hijack" binaries from unstable. Also one can hijack packages
        # via buildds (but people who try this should not be DMs).
        for binary_name in upload.changes.binary_names:
            binaries = session.query(DBBinary).join(DBBinary.source) \
                .filter(DBBinary.suites.contains(suite)) \
                .filter(DBBinary.package == binary_name)
            for binary in binaries:
                if binary.source.source != upload.changes.changes['Source']:
                    return True, binary, binary.source.source
        return False, None, None

    def _check_acl(self, session, upload, acl):
        """Evaluate a single ACL against the upload.

        @return: C{(None, None)} if the ACL does not apply to this
                 fingerprint/keyring, C{(False, reason)} if it forbids the
                 upload, C{(True, None)} if it allows it
        """
        source_name = upload.changes.source_name

        # ACLs restricted to certain fingerprints or keyrings simply do
        # not apply to other uploaders.
        if acl.match_fingerprint and upload.fingerprint not in acl.fingerprints:
            return None, None
        if acl.match_keyring is not None and upload.fingerprint.keyring != acl.match_keyring:
            return None, None

        if not acl.allow_new:
            if upload.new:
                return False, "NEW uploads are not allowed"
            for f in upload.changes.files.itervalues():
                if f.section == 'byhand' or f.section.startswith("raw-"):
                    return False, "BYHAND uploads are not allowed"
        if not acl.allow_source and upload.changes.source is not None:
            return False, "sourceful uploads are not allowed"
        binaries = upload.changes.binaries
        if len(binaries) != 0:
            if not acl.allow_binary:
                return False, "binary uploads are not allowed"
            if upload.changes.source is None and not acl.allow_binary_only:
                return False, "binary-only uploads are not allowed"
            if not acl.allow_binary_all:
                # Only explicitly whitelisted architectures may be uploaded;
                # 'source' is not an architecture in this sense.
                uploaded_arches = set(upload.changes.architectures)
                uploaded_arches.discard('source')
                allowed_arches = set(a.arch_string for a in acl.architectures)
                forbidden_arches = uploaded_arches - allowed_arches
                if len(forbidden_arches) != 0:
                    return False, "uploads for architecture(s) {0} are not allowed".format(", ".join(forbidden_arches))
        if not acl.allow_hijack:
            for suite in upload.final_suites:
                does_hijack, hijacked_binary, hijacked_from = self._does_hijack(session, upload, suite)
                if does_hijack:
                    return False, "hijacks are not allowed (binary={0}, other-source={1})".format(hijacked_binary, hijacked_from)

        acl_per_source = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=upload.fingerprint, source=source_name).first()
        if acl.allow_per_source:
            # XXX: Drop DMUA part here and switch to new implementation.
            # XXX: Send warning mail once users can set the new DMUA flag
            dmua_status, dmua_reason = self._check_dmua(upload)
            if acl_per_source is None:
                if not dmua_status:
                    return False, dmua_reason
                else:
                    upload.warn('DM flag not set, but accepted as DMUA was set.')
            #if acl_per_source is None:
            #    return False, "not allowed to upload source package '{0}'".format(source_name)
        if acl.deny_per_source and acl_per_source is not None:
            return False, acl_per_source.reason or "forbidden to upload source package '{0}'".format(source_name)

        return True, None

    def _check_dmua(self, upload):
        """Check the legacy DM-Upload-Allowed conditions for this upload.

        @return: C{(True, None)} if allowed, C{(False, reason)} otherwise
        """
        # This code is not very nice, but hopefully works until we can replace
        # DM-Upload-Allowed, cf. https://lists.debian.org/debian-project/2012/06/msg00029.html
        session = upload.session

        # Check DM-Upload-Allowed
        suites = upload.final_suites
        assert len(suites) == 1
        suite = list(suites)[0]

        # The reference version is the newest one in unstable/experimental
        # (or in the same backports suite for backports uploads).
        last_suites = ['unstable', 'experimental']
        if suite.suite_name.endswith('-backports'):
            last_suites = [suite.suite_name]
        last = session.query(DBSource).filter_by(source=upload.changes.changes['Source']) \
            .join(DBSource.suites).filter(Suite.suite_name.in_(last_suites)) \
            .order_by(DBSource.version.desc()).limit(1).first()
        if last is None:
            return False, 'No existing source found in {0}'.format(' or '.join(last_suites))
        if not last.dm_upload_allowed:
            return False, 'DM-Upload-Allowed is not set in {0}={1}'.format(last.source, last.version)

        # check current Changed-by is in last Maintainer or Uploaders
        uploader_names = [ u.name for u in last.uploaders ]
        changed_by_field = upload.changes.changes.get('Changed-By', upload.changes.changes['Maintainer'])
        if changed_by_field not in uploader_names:
            return False, '{0} is not an uploader for {1}={2}'.format(changed_by_field, last.source, last.version)

        # check Changed-by is the DM
        changed_by = fix_maintainer(changed_by_field)
        uid = upload.fingerprint.uid
        if uid is None:
            return False, 'Unknown uid for fingerprint {0}'.format(upload.fingerprint.fingerprint)
        # fix_maintainer returns a tuple; [2] is the name, [3] the email.
        if uid.uid != changed_by[3] and uid.name != changed_by[2]:
            return False, 'DMs are not allowed to sponsor uploads (expected {0} <{1}> as maintainer, but got {2})'.format(uid.name, uid.uid, changed_by_field)

        return True, None

    def check(self, upload):
        """Check the upload against the fingerprint's (or keyring's) ACL
        and against all global ACLs.

        @raise Reject: if any applicable ACL forbids the upload
        """
        session = upload.session
        fingerprint = upload.fingerprint
        keyring = fingerprint.keyring

        if keyring is None:
            raise Reject('No keyring for fingerprint {0}'.format(fingerprint.fingerprint))
        if not keyring.active:
            raise Reject('Keyring {0} is not active'.format(keyring.name))

        acl = fingerprint.acl or keyring.acl
        if acl is None:
            raise Reject('No ACL for fingerprint {0}'.format(fingerprint.fingerprint))
        # NOTE(review): `not result` also rejects the (None, None)
        # "does not apply" case here (with reason None); only the
        # global-ACL loop below treats None as "skip".
        result, reason = self._check_acl(session, upload, acl)
        if not result:
            raise Reject(reason)

        for acl in session.query(ACL).filter_by(is_global=True):
            result, reason = self._check_acl(session, upload, acl)
            if result == False:
                raise Reject(reason)

        return True

    def per_suite_check(self, upload, suite):
        """Check the upload against the suite's ACLs, if any.

        At least one suite ACL must accept the upload; any explicit
        denial rejects it immediately.

        @raise Reject: if denied, or if no suite ACL accepts the upload
        """
        acls = suite.acls
        if len(acls) != 0:
            accept = False
            for acl in acls:
                result, reason = self._check_acl(upload.session, upload, acl)
                if result == False:
                    raise Reject(reason)
                accept = accept or result
            if not accept:
                raise Reject('Not accepted by any per-suite acl (suite={0})'.format(suite.suite_name))
        return True
552
class TransitionCheck(Check):
    """check for a transition"""
    def check(self, upload):
        if 'source' not in upload.changes.architectures:
            return True

        transitions = self.get_transitions()
        if transitions is None:
            return True

        # Bug fix: `session` was never defined in this method.
        session = upload.session

        control = upload.changes.changes
        source = re_field_source.match(control['Source']).group('package')

        for trans in transitions:
            t = transitions[trans]
            # Bug fix: the transition's source package used to clobber the
            # uploaded source name, so `source in t['packages']` below
            # never tested the actual upload.
            transition_source = t["source"]
            expected = t["new"]

            # Will be None if nothing is in testing.
            current = get_source_in_suite(transition_source, "testing", session)
            if current is not None:
                compare = apt_pkg.version_compare(current.version, expected)

            if current is None or compare < 0:
                # This is still valid, the current version in testing is older than
                # the new version we wait for, or there is none in testing yet

                # Check if the source we look at is affected by this.
                if source in t['packages']:
                    # The source is affected, lets reject it.

                    rejectmsg = "{0}: part of the {1} transition.\n\n".format(source, trans)

                    if current is not None:
                        currentlymsg = "at version {0}".format(current.version)
                    else:
                        currentlymsg = "not present in testing"

                    rejectmsg += "Transition description: {0}\n\n".format(t["reason"])

                    rejectmsg += "\n".join(textwrap.wrap("""Your package
is part of a testing transition designed to get {0} migrated (it is
currently {1}, we need version {2}).  This transition is managed by the
Release Team, and {3} is the Release-Team member responsible for it.
Please mail debian-release@lists.debian.org or contact {3} directly if you
need further assistance.  You might want to upload to experimental until this
transition is done.""".format(transition_source, currentlymsg, expected, t["rm"])))

                    raise Reject(rejectmsg)

        return True

    def get_transitions(self):
        """Load the transition description from Dinstall::ReleaseTransitions.

        @return: parsed YAML document, or C{None} if the option is unset,
                 the file is missing, or the YAML is broken
        """
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        # Context manager instead of a leaked `file()` handle.
        with open(path, 'r') as fd:
            contents = fd.read()
        try:
            # safe_load: no reason to allow arbitrary object construction
            # from the transitions file.
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
619
class NoSourceOnlyCheck(Check):
    """Check for source-only upload

    Source-only uploads are only allowed if Dinstall::AllowSourceOnlyUploads is
    set. Otherwise they are rejected.
    """
    def check(self, upload):
        if Config().find_b("Dinstall::AllowSourceOnlyUploads"):
            return True

        changes = upload.changes
        # A source package with no accompanying binaries is a
        # source-only upload.
        is_source_only = changes.source is not None and not changes.binaries
        if is_source_only:
            raise Reject('Source-only uploads are not allowed.')
        return True
633
class LintianCheck(Check):
    """Check package using lintian"""
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            # safe_load: the tag file is operator-controlled, but there is
            # no reason to allow arbitrary object construction from YAML.
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        # Write the tags to a temporary file for --tags-from-file; make
        # sure the fd is closed even if writing fails.
        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        try:
            for tags in lintiantags.itervalues():
                for tag in tags:
                    temptagfile.write('%s\n' % tag)
        finally:
            temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)
        try:
            # XXX: lintian is invoked through the shell; temp_filename and
            # changespath are generated by dak, but this should move to
            # subprocess with an argument list.
            if cnf.unprivgroup:
                cmd = "sudo -H -u {0} -- /usr/bin/lintian --show-overrides --tags-from-file {1} {2}".format(cnf.unprivgroup, temp_filename, changespath)
            else:
                cmd = "/usr/bin/lintian --show-overrides --tags-from-file {0} {1}".format(temp_filename, changespath)
            result, output = commands.getstatusoutput(cmd)
        finally:
            os.unlink(temp_filename)

        # Exit status 2 means lintian itself failed; warn but continue so
        # the output (if any) can still be inspected below.
        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
687
class SourceFormatCheck(Check):
    """Check source format is allowed in the target suite"""
    def per_suite_check(self, upload, suite):
        source = upload.changes.source
        if source is None:
            # Binary-only uploads have no source format to validate.
            return True

        session = upload.session
        source_format = source.dsc['Format']
        allowed = session.query(SrcFormat) \
            .filter_by(format_name=source_format) \
            .filter(SrcFormat.suites.contains(suite)) \
            .first()
        if allowed is None:
            raise Reject('source format {0} is not allowed in suite {1}'.format(source_format, suite.suite_name))
700
class SuiteArchitectureCheck(Check):
    """Check all uploaded architectures are enabled in the target suite."""
    def per_suite_check(self, upload, suite):
        session = upload.session
        for arch in upload.changes.architectures:
            query = session.query(Architecture).filter_by(arch_string=arch).filter(Architecture.suites.contains(suite))
            if query.first() is None:
                # Bug fix: the message used {2} with only two format
                # arguments, raising IndexError instead of Reject.
                raise Reject('Architecture {0} is not allowed in suite {1}'.format(arch, suite.suite_name))

        return True
710
class VersionCheck(Check):
    """Check version constraints"""
    def _highest_source_version(self, session, source_name, suite):
        """Return the highest version of source C{source_name} in C{suite},
        or C{None} if it is not present there."""
        db_source = session.query(DBSource).filter_by(source=source_name) \
            .filter(DBSource.suites.contains(suite)).order_by(DBSource.version.desc()).first()
        if db_source is None:
            return None
        else:
            return db_source.version

    def _highest_binary_version(self, session, binary_name, suite, architecture):
        """Return the highest version of binary C{binary_name} in C{suite}
        for C{architecture} or 'all', or C{None} if it is not present."""
        db_binary = session.query(DBBinary).filter_by(package=binary_name) \
            .filter(DBBinary.suites.contains(suite)) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_(['all', architecture])) \
            .order_by(DBBinary.version.desc()).first()
        if db_binary is None:
            return None
        else:
            return db_binary.version

    def _version_checks(self, upload, suite, op):
        """Reject unless C{op(version_compare(uploaded, existing))} holds
        for every uploaded source and binary against the highest existing
        version in C{suite}. Nothing existing means nothing to compare.

        @raise Reject: if any comparison fails C{op}
        """
        session = upload.session

        if upload.changes.source is not None:
            source_name = upload.changes.source.dsc['Source']
            source_version = upload.changes.source.dsc['Version']
            v = self._highest_source_version(session, source_name, suite)
            if v is not None and not op(version_compare(source_version, v)):
                raise Reject('Version check failed (source={0}, version={1}, other-version={2}, suite={3})'.format(source_name, source_version, v, suite.suite_name))

        for binary in upload.changes.binaries:
            binary_name = binary.control['Package']
            binary_version = binary.control['Version']
            architecture = binary.control['Architecture']
            v = self._highest_binary_version(session, binary_name, suite, architecture)
            if v is not None and not op(version_compare(binary_version, v)):
                raise Reject('Version check failed (binary={0}, version={1}, other-version={2}, suite={3})'.format(binary_name, binary_version, v, suite.suite_name))

    def per_suite_check(self, upload, suite):
        """Enforce the suite's MustBeNewerThan/Enhances and MustBeOlderThan
        version constraints, plus being newer than C{suite} itself.

        @raise Reject: if any version constraint is violated
        """
        session = upload.session

        vc_newer = session.query(dbconn.VersionCheck).filter_by(suite=suite) \
            .filter(dbconn.VersionCheck.check.in_(['MustBeNewerThan', 'Enhances']))
        must_be_newer_than = [ vc.reference for vc in vc_newer ]
        # Must be newer than old versions in `suite`
        must_be_newer_than.append(suite)

        for s in must_be_newer_than:
            # version_compare(new, old) > 0 means strictly newer.
            self._version_checks(upload, s, lambda result: result > 0)

        vc_older = session.query(dbconn.VersionCheck).filter_by(suite=suite, check='MustBeOlderThan')
        must_be_older_than = [ vc.reference for vc in vc_older ]

        for s in must_be_older_than:
            self._version_checks(upload, s, lambda result: result < 0)

        return True

    @property
    def forcable(self):
        # Version check failures may be overridden by an operator.
        return True