git.decadent.org.uk Git — dak.git blob view: daklib/archive.py
Commit message: "Also import re_bin_only_nmu from daklib.regexes"
[dak.git] / daklib / archive.py
1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
2 #
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU General Public License along
14 # with this program; if not, write to the Free Software Foundation, Inc.,
15 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
16
17 """module to manipulate the archive
18
19 This module provides classes to manipulate the archive.
20 """
21
22 from .dbconn import *
23 import daklib.checks as checks
24 from daklib.config import Config
25 import daklib.upload as upload
26 import daklib.utils as utils
27 from .fstransactions import FilesystemTransaction
28 from .regexes import re_changelog_versions, re_bin_only_nmu
29
30 import apt_pkg
31 from datetime import datetime
32 import os
33 import shutil
34 import subprocess
35 from sqlalchemy.orm.exc import NoResultFound
36 import tempfile
37 import traceback
38
class ArchiveException(Exception):
    """Base exception for errors raised while manipulating the archive."""
    pass
41
class HashMismatchException(ArchiveException):
    """Raised when a file's size or checksums do not match the values
    already recorded for it in the pool."""
    pass
44
class ArchiveTransaction(object):
    """manipulate the archive in a transaction

    Wraps a database session and a filesystem transaction so that changes
    to both are committed or rolled back together.
    """
    def __init__(self):
        self.fs = FilesystemTransaction()
        self.session = DBConn().session()

    def get_file(self, hashed_file, source_name):
        """Look for file `hashed_file` in database

        Args:
           hashed_file (daklib.upload.HashedFile): file to look for in the database
           source_name (str): name of the source package the file belongs to;
              used to derive the pool path

        Raises:
           KeyError: file was not found in the database
           HashMismatchException: hash mismatch

        Returns:
           `daklib.dbconn.PoolFile` object for the database
        """
        poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
        try:
            poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
            if poolfile.filesize != hashed_file.size or poolfile.md5sum != hashed_file.md5sum or poolfile.sha1sum != hashed_file.sha1sum or poolfile.sha256sum != hashed_file.sha256sum:
                raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
            return poolfile
        except NoResultFound:
            raise KeyError('{0} not found in database.'.format(poolname))

    def _install_file(self, directory, hashed_file, archive, component, source_name):
        """Install a file

        Will not give an error when the file is already present.

        Returns:
           `daklib.dbconn.PoolFile` object for the new file
        """
        session = self.session

        poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
        try:
            poolfile = self.get_file(hashed_file, source_name)
        except KeyError:
            # Not known yet: create the pool entry.
            poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
            poolfile.md5sum = hashed_file.md5sum
            poolfile.sha1sum = hashed_file.sha1sum
            poolfile.sha256sum = hashed_file.sha256sum
            session.add(poolfile)
            session.flush()

        try:
            session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
        except NoResultFound:
            # Not yet present in this archive/component: record it and copy
            # the actual file into the pool.
            archive_file = ArchiveFile(archive, component, poolfile)
            session.add(archive_file)
            session.flush()

            path = os.path.join(archive.path, 'pool', component.component_name, poolname)
            hashed_file_path = os.path.join(directory, hashed_file.filename)
            self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)

        return poolfile

    def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
        """Install a binary package

        Args:
           directory (str): directory the binary package is located in
           binary (daklib.upload.Binary): binary package to install
           suite (daklib.dbconn.Suite): target suite
           component (daklib.dbconn.Component): target component

        Kwargs:
           allow_tainted (bool): allow to copy additional files from tainted archives
           fingerprint (daklib.dbconn.Fingerprint): optional fingerprint
           source_suites (list of daklib.dbconn.Suite or True): suites to copy
              the source from if they are not in `suite` or True to allow
              copying from any suite.
              This can also be a SQLAlchemy (sub)query object.
           extra_source_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from

        Returns:
           `daklib.dbconn.DBBinary` object for the new package
        """
        session = self.session
        control = binary.control
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        architecture = get_architecture(control['Architecture'], session)

        (source_name, source_version) = binary.source
        source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
        source = source_query.filter(DBSource.suites.contains(suite)).first()
        if source is None:
            # Source is not in the target suite: look in `source_suites`
            # (or anywhere, if that is True) and copy it over.
            # Use an identity check: `source_suites` may be a SQLAlchemy
            # (sub)query for which `!= True` is not a plain boolean test.
            if source_suites is not True:
                source_query = source_query.filter(DBSource.suites.any(source_suites))
            source = source_query.first()
            if source is None:
                raise ArchiveException('{0}: trying to install to {1}, but could not find source'.format(binary.hashed_file.filename, suite.suite_name))
            self.copy_source(source, suite, component)

        db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)

        unique = dict(
            package=control['Package'],
            version=control['Version'],
            architecture=architecture,
            )
        rest = dict(
            source=source,
            maintainer=maintainer,
            poolfile=db_file,
            binarytype=binary.type,
            fingerprint=fingerprint,
            )

        try:
            db_binary = session.query(DBBinary).filter_by(**unique).one()
            # An existing binary with the same (package, version, arch)
            # must agree in all remaining fields.
            for key, value in rest.iteritems():
                if getattr(db_binary, key) != value:
                    raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
        except NoResultFound:
            db_binary = DBBinary(**unique)
            for key, value in rest.iteritems():
                setattr(db_binary, key, value)
            session.add(db_binary)
            session.flush()
            import_metadata_into_db(db_binary, session)

            self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)

        if suite not in db_binary.suites:
            db_binary.suites.append(suite)

        session.flush()

        return db_binary

    def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
        """ensure source exists in the given archive

        This is intended to be used to check that Built-Using sources exist.

        Args:
           filename (str): filename to use in error messages
           source (daklib.dbconn.DBSource): source to look for
           archive (daklib.dbconn.Archive): archive to look in

        Kwargs:
           extra_archives (list of daklib.dbconn.Archive): list of archives to copy
               the source package from if it is not yet present in `archive`

        Raises:
           ArchiveException: source is not present in `archive` nor in any
               of the `extra_archives`
        """
        session = self.session
        db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
        if db_file is not None:
            return True

        # Try to copy file from one extra archive
        if extra_archives is None:
            extra_archives = []
        db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([ a.archive_id for a in extra_archives])).first()
        if db_file is None:
            raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))

        source_archive = db_file.archive
        for dsc_file in source.srcfiles:
            af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
            # We were given an explicit list of archives so it is okay to copy from tainted archives.
            self._copy_file(af.file, archive, db_file.component, allow_tainted=True)

    def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
        """Add Built-Using sources to `db_binary.extra_sources`

        Args:
           db_binary (daklib.dbconn.DBBinary): binary to attach the extra sources to
           filename (str): filename to use in error messages
           control (dict-like): control fields of the binary package
           suite (daklib.dbconn.Suite): target suite (determines the archive
              the sources must exist in)

        Kwargs:
           extra_archives (list of daklib.dbconn.Archive): extra archives to
              copy missing Built-Using sources from
        """
        session = self.session
        built_using = control.get('Built-Using', None)

        if built_using is not None:
            for dep in apt_pkg.parse_depends(built_using):
                assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
                bu_source_name, bu_source_version, comp = dep[0]
                assert comp == '=', 'Built-Using must contain strict dependencies'

                bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
                if bu_source is None:
                    raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))

                self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)

                db_binary.extra_sources.append(bu_source)

    def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
        """Install a source package

        Args:
           directory (str): directory the source package is located in
           source (daklib.upload.Source): source package to install
           suite (daklib.dbconn.Suite): target suite
           component (daklib.dbconn.Component): target component
           changed_by (daklib.dbconn.Maintainer): person who prepared this version of the package

        Kwargs:
           allow_tainted (bool): allow to copy additional files from tainted archives
           fingerprint (daklib.dbconn.Fingerprint): optional fingerprint

        Returns:
           `daklib.dbconn.DBSource` object for the new source
        """
        session = self.session
        archive = suite.archive
        control = source.dsc
        maintainer = get_or_set_maintainer(control['Maintainer'], session)
        source_name = control['Source']

        ### Add source package to database

        # We need to install the .dsc first as the DBSource object refers to it.
        db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)

        unique = dict(
            source=source_name,
            version=control['Version'],
            )
        rest = dict(
            maintainer=maintainer,
            changedby=changed_by,
            #install_date=datetime.now().date(),
            poolfile=db_file_dsc,
            fingerprint=fingerprint,
            dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
            )

        created = False
        try:
            db_source = session.query(DBSource).filter_by(**unique).one()
            # An existing source with the same (source, version) must agree
            # in all remaining fields.
            for key, value in rest.iteritems():
                if getattr(db_source, key) != value:
                    raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
        except NoResultFound:
            created = True
            db_source = DBSource(**unique)
            for key, value in rest.iteritems():
                setattr(db_source, key, value)
            # XXX: set as default in postgres?
            db_source.install_date = datetime.now().date()
            session.add(db_source)
            session.flush()

            # Add .dsc file. Other files will be added later.
            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file_dsc
            session.add(db_dsc_file)
            session.flush()

        if suite in db_source.suites:
            return db_source

        db_source.suites.append(suite)

        if not created:
            return db_source

        ### Now add remaining files and copy them to the archive.

        for hashed_file in source.files.itervalues():
            hashed_file_path = os.path.join(directory, hashed_file.filename)
            if os.path.exists(hashed_file_path):
                db_file = self._install_file(directory, hashed_file, archive, component, source_name)
                session.add(db_file)
            else:
                # File is not part of the upload: it must already exist in
                # the pool somewhere; copy it into this archive.
                db_file = self.get_file(hashed_file, source_name)
                self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)

            db_dsc_file = DSCFile()
            db_dsc_file.source = db_source
            db_dsc_file.poolfile = db_file
            session.add(db_dsc_file)

        session.flush()

        # Importing is safe as we only arrive here when we did not find the source already installed earlier.
        import_metadata_into_db(db_source, session)

        # Uploaders are the maintainer and co-maintainers from the Uploaders field
        db_source.uploaders.append(maintainer)
        if 'Uploaders' in control:
            def split_uploaders(field):
                import re
                for u in re.sub(">[ ]*,", ">\t", field).split("\t"):
                    yield u.strip()

            for u in split_uploaders(control['Uploaders']):
                db_source.uploaders.append(get_or_set_maintainer(u, session))
        session.flush()

        return db_source

    def _copy_file(self, db_file, archive, component, allow_tainted=False):
        """Copy a file to the given archive and component

        Args:
           db_file (daklib.dbconn.PoolFile): file to copy
           archive (daklib.dbconn.Archive): target archive
           component (daklib.dbconn.Component): target component

        Kwargs:
           allow_tainted (bool): allow to copy from tainted archives (such as NEW)

        Raises:
           ArchiveException: no (acceptable) archive contains the file
        """
        session = self.session

        if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
            query = session.query(ArchiveFile).filter_by(file=db_file, component=component)
            if not allow_tainted:
                query = query.join(Archive).filter(Archive.tainted == False)

            source_af = query.first()
            if source_af is None:
                raise ArchiveException('cp: Could not find {0} in component {1} in any archive.'.format(db_file.filename, component.component_name))
            target_af = ArchiveFile(archive, component, db_file)
            session.add(target_af)
            session.flush()
            self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)

    def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
        """Copy a binary package to the given suite and component

        Args:
           db_binary (daklib.dbconn.DBBinary): binary to copy
           suite (daklib.dbconn.Suite): target suite
           component (daklib.dbconn.Component): target component

        Kwargs:
           allow_tainted (bool): allow to copy from tainted archives (such as NEW)
           extra_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from
        """
        session = self.session
        archive = suite.archive
        if archive.tainted:
            allow_tainted = True

        # make sure built-using packages are present in target archive
        filename = db_binary.poolfile.filename
        for db_source in db_binary.extra_sources:
            self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)

        # copy binary
        db_file = db_binary.poolfile
        self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
        if suite not in db_binary.suites:
            db_binary.suites.append(suite)
        self.session.flush()

    def copy_source(self, db_source, suite, component, allow_tainted=False):
        """Copy a source package to the given suite and component

        Args:
           db_source (daklib.dbconn.DBSource): source to copy
           suite (daklib.dbconn.Suite): target suite
           component (daklib.dbconn.Component): target component

        Kwargs:
           allow_tainted (bool): allow to copy from tainted archives (such as NEW)
        """
        archive = suite.archive
        if archive.tainted:
            allow_tainted = True
        for db_dsc_file in db_source.srcfiles:
            self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
        if suite not in db_source.suites:
            db_source.suites.append(suite)
        self.session.flush()

    def remove_file(self, db_file, archive, component):
        """Remove a file from a given archive and component

        Args:
           db_file (daklib.dbconn.PoolFile): file to remove
           archive (daklib.dbconn.Archive): archive to remove the file from
           component (daklib.dbconn.Component): component to remove the file from
        """
        # Fetch the row itself: without .one() this bound a Query object to
        # `af`, and the .path access below would raise AttributeError.
        af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component).one()
        self.fs.unlink(af.path)
        self.session.delete(af)

    def remove_binary(self, binary, suite):
        """Remove a binary from a given suite

        Args:
           binary (daklib.dbconn.DBBinary): binary to remove
           suite (daklib.dbconn.Suite): suite to remove the package from
        """
        binary.suites.remove(suite)
        self.session.flush()

    def remove_source(self, source, suite):
        """Remove a source from a given suite

        Raises:
           ArchiveException: source package is still referenced by other
                             binaries in the suite

        Args:
           source (daklib.dbconn.DBSource): source to remove
           suite (daklib.dbconn.Suite): suite to remove the package from
        """
        session = self.session

        query = session.query(DBBinary).filter_by(source=source) \
            .filter(DBBinary.suites.contains(suite))
        if query.first() is not None:
            raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))

        source.suites.remove(suite)
        session.flush()

    def commit(self):
        """commit changes

        The rollbacks in the finally block are no-ops after a successful
        commit, but undo partial work if either commit raises.
        """
        try:
            self.session.commit()
            self.fs.commit()
        finally:
            self.session.rollback()
            self.fs.rollback()

    def rollback(self):
        """rollback changes"""
        self.session.rollback()
        self.fs.rollback()

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Commit on clean exit; roll back if an exception is propagating.
        if type is None:
            self.commit()
        else:
            self.rollback()
        return None
482
483 class ArchiveUpload(object):
484     """handle an upload
485
486     This class can be used in a with-statement:
487
488        with ArchiveUpload(...) as upload:
489           ...
490
491     Doing so will automatically run any required cleanup and also rollback the
492     transaction if it was not committed.
493
494     Attributes:
495        changes (daklib.upload.Changes): upload to process
496        directory (str): directory with temporary copy of files. set by `prepare`
497        fingerprint (daklib.dbconn.Fingerprint): fingerprint used to sign the upload
498        new (bool): upload is NEW. set by `check`
499        reject_reasons (list of str): reasons why the upload cannot be accepted
500        session: database session
501        transaction (daklib.archive.ArchiveTransaction): transaction used to handle the upload
502        warnings (list of str): warnings (NOT USED YET)
503     """
504     def __init__(self, directory, changes, keyrings):
505         self.transaction = ArchiveTransaction()
506         self.session = self.transaction.session
507
508         self.original_directory = directory
509         self.original_changes = changes
510         self.changes = None
511         self.directory = None
512         self.keyrings = keyrings
513
514         self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
515
516         self.reject_reasons = []
517         self.warnings = []
518         self.final_suites = None
519         self.new = False
520
521         self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
522         self._new = self._new_queue.suite
523
524     def prepare(self):
525         """prepare upload for further processing
526
527         This copies the files involved to a temporary directory.  If you use
528         this method directly, you have to remove the directory given by the
529         `directory` attribute later on your own.
530
531         Instead of using the method directly, you can also use a with-statement:
532
533            with ArchiveUpload(...) as upload:
534               ...
535
536         This will automatically handle any required cleanup.
537         """
538         assert self.directory is None
539         assert self.original_changes.valid_signature
540
541         cnf = Config()
542         session = self.transaction.session
543
544         self.directory = tempfile.mkdtemp(dir=cnf.get('Dir::TempPath'))
545         with FilesystemTransaction() as fs:
546             src = os.path.join(self.original_directory, self.original_changes.filename)
547             dst = os.path.join(self.directory, self.original_changes.filename)
548             fs.copy(src, dst)
549
550             self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
551
552             for f in self.changes.files.itervalues():
553                 src = os.path.join(self.original_directory, f.filename)
554                 dst = os.path.join(self.directory, f.filename)
555                 fs.copy(src, dst)
556
557             source = self.changes.source
558             if source is not None:
559                 for f in source.files.itervalues():
560                     src = os.path.join(self.original_directory, f.filename)
561                     dst = os.path.join(self.directory, f.filename)
562                     if f.filename not in self.changes.files:
563                         db_file = self.transaction.get_file(f, source.dsc['Source'])
564                         db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
565                         fs.copy(db_archive_file.path, dst, symlink=True)
566
567     def unpacked_source(self):
568         """Path to unpacked source
569
570         Get path to the unpacked source. This method does unpack the source
571         into a temporary directory under `self.directory` if it has not
572         been done so already.
573
574         Returns:
575            String giving the path to the unpacked source directory
576            or None if no source was included in the upload.
577         """
578         assert self.directory is not None
579
580         source = self.changes.source
581         if source is None:
582             return None
583         dsc_path = os.path.join(self.directory, source._dsc_file.filename)
584
585         sourcedir = os.path.join(self.directory, 'source')
586         if not os.path.exists(sourcedir):
587             subprocess.check_call(["dpkg-source", "--no-copy", "-x", dsc_path, sourcedir], shell=False)
588         if not os.path.isdir(sourcedir):
589             raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
590         return sourcedir
591
592     def _map_suite(self, suite_name):
593         for rule in Config().value_list("SuiteMappings"):
594             fields = rule.split()
595             rtype = fields[0]
596             if rtype == "map" or rtype == "silent-map":
597                 (src, dst) = fields[1:3]
598                 if src == suite_name:
599                     suite_name = dst
600                     if rtype != "silent-map":
601                         self.warnings.append('Mapping {0} to {0}.'.format(src, dst))
602             elif rtype == "ignore":
603                 ignored = fields[1]
604                 if suite_name == ignored:
605                     self.warnings.append('Ignoring target suite {0}.'.format(ignored))
606                     suite_name = None
607             elif rtype == "reject":
608                 rejected = fields[1]
609                 if suite_name == rejected:
610                     self.reject_reasons.append('Uploads to {0} are not accepted.'.format(suite))
611             ## XXX: propup-version and map-unreleased not yet implemented
612         return suite_name
613
614     def _mapped_suites(self):
615         """Get target suites after mappings
616
617         Returns:
618            list of daklib.dbconn.Suite giving the mapped target suites of this upload
619         """
620         session = self.session
621
622         suite_names = []
623         for dist in self.changes.distributions:
624             suite_name = self._map_suite(dist)
625             if suite_name is not None:
626                 suite_names.append(suite_name)
627
628         suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
629         return suites
630
631     def _check_new(self, suite):
632         """Check if upload is NEW
633
634         An upload is NEW if it has binary or source packages that do not have
635         an override in `suite` OR if it references files ONLY in a tainted
636         archive (eg. when it references files in NEW).
637
638         Returns:
639            True if the upload is NEW, False otherwise
640         """
641         session = self.session
642
643         # Check for missing overrides
644         for b in self.changes.binaries:
645             override = self._binary_override(suite, b)
646             if override is None:
647                 return True
648
649         if self.changes.source is not None:
650             override = self._source_override(suite, self.changes.source)
651             if override is None:
652                 return True
653
654         # Check if we reference a file only in a tainted archive
655         files = self.changes.files.values()
656         if self.changes.source is not None:
657             files.extend(self.changes.source.files.values())
658         for f in files:
659             query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
660             query_untainted = query.join(Archive).filter(Archive.tainted == False)
661
662             in_archive = (query.first() is not None)
663             in_untainted_archive = (query_untainted.first() is not None)
664
665             if in_archive and not in_untainted_archive:
666                 return True
667
668     def _final_suites(self):
669         session = self.session
670
671         mapped_suites = self._mapped_suites()
672         final_suites = set()
673
674         for suite in mapped_suites:
675             overridesuite = suite
676             if suite.overridesuite is not None:
677                 overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
678             if self._check_new(overridesuite):
679                 self.new = True
680             final_suites.add(suite)
681
682         return final_suites
683
684     def _binary_override(self, suite, binary):
685         """Get override entry for a binary
686
687         Args:
688            suite (daklib.dbconn.Suite)
689            binary (daklib.upload.Binary)
690
691         Returns:
692            daklib.dbconn.Override or None
693         """
694         if suite.overridesuite is not None:
695             suite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
696
697         query = self.session.query(Override).filter_by(suite=suite, package=binary.control['Package']) \
698                 .join(Component).filter(Component.component_name == binary.component) \
699                 .join(OverrideType).filter(OverrideType.overridetype == binary.type)
700
701         try:
702             return query.one()
703         except NoResultFound:
704             return None
705
706     def _source_override(self, suite, source):
707         """Get override entry for a source
708
709         Args:
710            suite (daklib.dbconn.Suite)
711            source (daklib.upload.Source)
712
713         Returns:
714            daklib.dbconn.Override or None
715         """
716         if suite.overridesuite is not None:
717             suite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
718
719         # XXX: component for source?
720         query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
721                 .join(OverrideType).filter(OverrideType.overridetype == 'dsc')
722
723         try:
724             return query.one()
725         except NoResultFound:
726             return None
727
    def check(self, force=False):
        """run checks against the upload

        Args:
           force (bool): ignore failing forcable checks

        Returns:
           True if all checks passed, False otherwise.  On failure the
           reasons are appended to `self.reject_reasons`.
        """
        # NOTE(review): `force` is not referenced in this method body —
        # confirm whether forcable checks honor it elsewhere.
        # XXX: needs to be better structured.
        assert self.changes.valid_signature

        try:
            # Upload-wide checks, run in order.  Each check may raise
            # checks.Reject to abort the whole pipeline.
            for chk in (
                    checks.SignatureCheck,
                    checks.ChangesCheck,
                    checks.HashesCheck,
                    checks.SourceCheck,
                    checks.BinaryCheck,
                    checks.ACLCheck,
                    checks.SingleDistributionCheck,
                    checks.NoSourceOnlyCheck,
                    checks.LintianCheck,
                    ):
                chk().check(self)

            # Determine the destination suites; an upload with nowhere to go
            # is rejected outright.
            final_suites = self._final_suites()
            if len(final_suites) == 0:
                self.reject_reasons.append('Ended with no suite to install to.')
                return False

            # Per-suite checks run against every destination suite.
            for chk in (
                    checks.SourceFormatCheck,
                    checks.SuiteArchitectureCheck,
                    checks.VersionCheck,
                    ):
                for suite in final_suites:
                    chk().per_suite_check(self, suite)

            # Per-suite checks record reasons instead of raising; reject if
            # any accumulated.
            if len(self.reject_reasons) != 0:
                return False

            # Remember the suites for install()/install_to_new().
            self.final_suites = final_suites
            return True
        except checks.Reject as e:
            self.reject_reasons.append(unicode(e))
        except Exception as e:
            # Any unexpected error becomes a rejection with a traceback so
            # the upload is never silently dropped.
            self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
        return False
777
    def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
        """Install upload to the given suite

        Args:
           suite (daklib.dbconn.Suite): suite to install the package into.
              This is the real suite, ie. after any redirection to NEW or a policy queue
           source_component_func: function to get the `daklib.dbconn.Component`
              for a `daklib.upload.Source` object
           binary_component_func: function to get the `daklib.dbconn.Component`
              for a `daklib.upload.Binary` object

        Kwargs:
           source_suites: see `daklib.archive.ArchiveTransaction.install_binary`
           extra_source_archives: see `daklib.archive.ArchiveTransaction.install_binary`

        Returns:
           tuple with two elements. The first is a `daklib.dbconn.DBSource`
           object for the install source or None if no source was included.
           The second is a list of `daklib.dbconn.DBBinary` objects for the
           installed binary packages.
        """
        # XXX: move this function to ArchiveTransaction?

        control = self.changes.changes
        # Changed-By falls back to Maintainer when absent.
        changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)

        if source_suites is None:
            # Default source_suites to the suites referenced by this suite's
            # version checks — presumably the suites whose sources may satisfy
            # the binaries being installed; TODO confirm against install_binary.
            source_suites = self.session.query(Suite).join(VersionCheck, VersionCheck.reference_id == Suite.suite_id).filter(VersionCheck.suite == suite).subquery()

        # Install the source package first (if any), then each binary.
        source = self.changes.source
        if source is not None:
            component = source_component_func(source)
            db_source = self.transaction.install_source(self.directory, source, suite, component, changed_by, fingerprint=self.fingerprint)
        else:
            db_source = None

        db_binaries = []
        for binary in self.changes.binaries:
            component = binary_component_func(binary)
            db_binary = self.transaction.install_binary(self.directory, binary, suite, component, fingerprint=self.fingerprint, source_suites=source_suites, extra_source_archives=extra_source_archives)
            db_binaries.append(db_binary)

        # Suites with `copychanges` set also keep a copy of the .changes file
        # in their dists/ directory.
        if suite.copychanges:
            src = os.path.join(self.directory, self.changes.filename)
            dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
            self.transaction.fs.copy(src, dst)

        return (db_source, db_binaries)
826
827     def _install_changes(self):
828         assert self.changes.valid_signature
829         control = self.changes.changes
830         session = self.transaction.session
831         config = Config()
832
833         changelog_id = None
834         # Only add changelog for sourceful uploads and binNMUs
835         if 'source' in self.changes.architectures or re_bin_only_nmu.search(control['Version']):
836             query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
837             changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
838             assert changelog_id is not None
839
840         db_changes = DBChange()
841         db_changes.changesname = self.changes.filename
842         db_changes.source = control['Source']
843         db_changes.binaries = control.get('Binary', None)
844         db_changes.architecture = control['Architecture']
845         db_changes.version = control['Version']
846         db_changes.distribution = control['Distribution']
847         db_changes.urgency = control['Urgency']
848         db_changes.maintainer = control['Maintainer']
849         db_changes.changedby = control.get('Changed-By', control['Maintainer'])
850         db_changes.date = control['Date']
851         db_changes.fingerprint = self.fingerprint.fingerprint
852         db_changes.changelog_id = changelog_id
853         db_changes.closes = self.changes.closed_bugs
854
855         self.transaction.session.add(db_changes)
856         self.transaction.session.flush()
857
858         return db_changes
859
860     def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
861         u = PolicyQueueUpload()
862         u.policy_queue = policy_queue
863         u.target_suite = target_suite
864         u.changes = db_changes
865         u.source = db_source
866         u.binaries = db_binaries
867         self.transaction.session.add(u)
868         self.transaction.session.flush()
869
870         dst = os.path.join(policy_queue.path, self.changes.filename)
871         self.transaction.fs.copy(self.changes.path, dst)
872
873         return u
874
    def try_autobyhand(self):
        """Try AUTOBYHAND

        Try to handle byhand packages automatically.  For each byhand file
        whose package matches an `AutomaticByHandPackages` rule (on Source,
        Section and Extension), the rule's script is run; files without a
        matching rule or whose script fails remain byhand.

        Returns:
           True if no byhand files remain (all were handled automatically
           or there were none), False otherwise
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None

        byhand = self.changes.byhand_files
        if len(byhand) == 0:
            return True

        suites = list(self.final_suites)
        assert len(suites) == 1, "BYHAND uploads must be to a single suite"
        suite = suites[0]

        cnf = Config()
        control = self.changes.changes
        automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")

        remaining = []
        for f in byhand:
            # byhand filenames look like `package_version_archext`,
            # e.g. `foo_1.0_all.tar.gz` -> arch `all`, ext `tar.gz`.
            package, version, archext = f.filename.split('_', 2)
            arch, ext = archext.split('.', 1)

            rule = automatic_byhand_packages.get(package)
            if rule is None:
                remaining.append(f)
                continue

            # The rule must match the upload exactly; otherwise leave the
            # file for manual processing.
            if rule['Source'] != control['Source'] or rule['Section'] != f.section or rule['Extension'] != ext:
                remaining.append(f)
                continue

            # Run the configured script; a non-zero exit keeps the file byhand.
            script = rule['Script']
            retcode = subprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename)], shell=False)
            if retcode != 0:
                print "W: error processing {0}.".format(f.filename)
                remaining.append(f)

        return len(remaining) == 0
920
921     def _install_byhand(self, policy_queue_upload, hashed_file):
922         """
923         Args:
924            policy_queue_upload (daklib.dbconn.PolicyQueueUpload): XXX
925            hashed_file (daklib.upload.HashedFile): XXX
926         """
927         fs = self.transaction.fs
928         session = self.transaction.session
929         policy_queue = policy_queue_upload.policy_queue
930
931         byhand_file = PolicyQueueByhandFile()
932         byhand_file.upload = policy_queue_upload
933         byhand_file.filename = hashed_file.filename
934         session.add(byhand_file)
935         session.flush()
936
937         src = os.path.join(self.directory, hashed_file.filename)
938         dst = os.path.join(policy_queue.path, hashed_file.filename)
939         fs.copy(src, dst)
940
941         return byhand_file
942
943     def _do_bts_versiontracking(self):
944         cnf = Config()
945         fs = self.transaction.fs
946
947         btsdir = cnf.get('Dir::BTSVersionTrack')
948         if btsdir is None or btsdir == '':
949             return
950
951         base = os.path.join(btsdir, self.changes.filename[:-8])
952
953         # version history
954         sourcedir = self.unpacked_source()
955         if sourcedir is not None:
956             fh = open(os.path.join(sourcedir, 'debian', 'changelog'), 'r')
957             versions = fs.create("{0}.versions".format(base), mode=0o644)
958             for line in fh.readlines():
959                 if re_changelog_versions.match(line):
960                     versions.write(line)
961             fh.close()
962             versions.close()
963
964         # binary -> source mapping
965         debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
966         for binary in self.changes.binaries:
967             control = binary.control
968             source_package, source_version = binary.source
969             line = " ".join([control['Package'], control['Version'], source_package, source_version])
970             print >>debinfo, line
971         debinfo.close()
972
    def install(self):
        """install upload

        Install upload to a suite or policy queue.  This method does *not*
        handle uploads to NEW.

        You need to have called the `check` method before calling this method.
        """
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None
        assert not self.new

        db_changes = self._install_changes()

        for suite in self.final_suites:
            # Overrides may live in a different suite (overridesuite).
            overridesuite = suite
            if suite.overridesuite is not None:
                overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()

            # Suites guarded by a policy queue install into the queue's suite
            # instead of the target suite itself.
            redirected_suite = suite
            if suite.policy_queue is not None:
                redirected_suite = suite.policy_queue.suite

            # These lambdas close over `overridesuite` and are only used
            # within this loop iteration, so the late binding is safe here.
            source_component_func = lambda source: self._source_override(overridesuite, source).component
            binary_component_func = lambda binary: self._binary_override(overridesuite, binary).component

            (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])

            if suite.policy_queue is not None:
                self._install_policy(suite.policy_queue, suite, db_changes, db_source, db_binaries)

            # copy to build queues
            if suite.policy_queue is None or suite.policy_queue.send_to_build_queues:
                for build_queue in suite.copy_queues:
                    self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])

        self._do_bts_versiontracking()
1011
    def install_to_new(self):
        """install upload to NEW

        Install upload to NEW.  This method does *not* handle regular uploads
        to suites or policy queues.

        You need to have called the `check` method before calling this method.
        """
        # Uploads to NEW are special as we don't have overrides.
        assert len(self.reject_reasons) == 0
        assert self.changes.valid_signature
        assert self.final_suites is not None

        source = self.changes.source
        binaries = self.changes.binaries
        byhand = self.changes.byhand_files

        # Uploads containing byhand files go to the 'byhand' queue instead
        # of 'new'.
        new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
        if len(byhand) > 0:
            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
        new_suite = new_queue.suite

        # we need a suite to guess components
        suites = list(self.final_suites)
        assert len(suites) == 1, "NEW uploads must be to a single suite"
        suite = suites[0]

        def binary_component_func(binary):
            # Prefer an existing override; fall back to the component the
            # binary itself claims.
            override = self._binary_override(suite, binary)
            if override is not None:
                return override.component
            component_name = binary.component
            component = self.session.query(Component).filter_by(component_name=component_name).one()
            return component

        # guess source component
        # XXX: should be moved into an extra method
        binary_component_names = set()
        for binary in binaries:
            component = binary_component_func(binary)
            binary_component_names.add(component.component_name)
        # Pick the "most free" component any binary landed in, in this
        # fixed preference order.
        source_component_name = None
        for guess in ('main', 'contrib', 'non-free'):
            if guess in binary_component_names:
                source_component_name = guess
                break
        if source_component_name is None:
            raise Exception('Could not guess source component.')
        source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
        source_component_func = lambda source: source_component

        db_changes = self._install_changes()
        # NOTE(review): source_suites=True is passed straight through to
        # install_binary — presumably "accept sources from any suite";
        # confirm against ArchiveTransaction.install_binary.
        (db_source, db_binaries) = self._install_to_suite(new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive])
        policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)

        for f in byhand:
            self._install_byhand(policy_upload, f)

        self._do_bts_versiontracking()
1071
    def commit(self):
        """commit changes

        Commits the underlying database and filesystem transaction.
        """
        self.transaction.commit()
1075
    def rollback(self):
        """rollback changes

        Rolls back the underlying database and filesystem transaction.
        """
        self.transaction.rollback()
1079
    def __enter__(self):
        """Context manager entry: prepare the upload and return self."""
        self.prepare()
        return self
1083
1084     def __exit__(self, type, value, traceback):
1085         if self.directory is not None:
1086             shutil.rmtree(self.directory)
1087             self.directory = None
1088         self.changes = None
1089         self.transaction.rollback()
1090         return None