[dak.git] / daklib / archive.py
1 # Copyright (C) 2012, Ansgar Burchardt <ansgar@debian.org>
2 #
3 # This program is free software; you can redistribute it and/or modify
4 # it under the terms of the GNU General Public License as published by
5 # the Free Software Foundation; either version 2 of the License, or
6 # (at your option) any later version.
7 #
8 # This program is distributed in the hope that it will be useful,
9 # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
11 # GNU General Public License for more details.
12 #
13 # You should have received a copy of the GNU General Public License along
14 # with this program; if not, write to the Free Software Foundation, Inc.,
15 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
16
17 """module to manipulate the archive
18
19 This module provides classes to manipulate the archive.
20 """
21
22 from .dbconn import *
23 import daklib.checks as checks
24 from daklib.config import Config
25 import daklib.upload as upload
26 import daklib.utils as utils
27 from .fstransactions import FilesystemTransaction
28 from .regexes import re_changelog_versions, re_bin_only_nmu
29
30 import apt_pkg
31 from datetime import datetime
32 import os
33 import shutil
34 import subprocess
35 from sqlalchemy.orm.exc import NoResultFound
36 import tempfile
37 import traceback
38
39 class ArchiveException(Exception):
40     pass
41
42 class HashMismatchException(ArchiveException):
43     pass
44
45 class ArchiveTransaction(object):
46     """manipulate the archive in a transaction
47     """
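    # A minimal usage sketch, not part of the original file: the transaction is
    # normally used as a context manager so that database and filesystem changes
    # are committed together on success and rolled back on error.  The names
    # `directory`, `binary`, `suite` and `component` below are placeholders for
    # objects obtained elsewhere (daklib.upload / daklib.dbconn).
    #
    #     with ArchiveTransaction() as transaction:
    #         transaction.install_binary(directory, binary, suite, component)
    #     # __exit__ commits if no exception was raised, otherwise rolls back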
48     def __init__(self):
49         self.fs = FilesystemTransaction()
50         self.session = DBConn().session()
51
52     def get_file(self, hashed_file, source_name):
53         """Look for file `hashed_file` in database
54
55         Args:
56            hashed_file (daklib.upload.HashedFile): file to look for in the database
57
58         Raises:
59            KeyError: file was not found in the database
60            HashMismatchException: hash mismatch
61
62         Returns:
63            `daklib.dbconn.PoolFile` object for the file in the database
64         """
65         poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
66         try:
67             poolfile = self.session.query(PoolFile).filter_by(filename=poolname).one()
68             if poolfile.filesize != hashed_file.size or poolfile.md5sum != hashed_file.md5sum or poolfile.sha1sum != hashed_file.sha1sum or poolfile.sha256sum != hashed_file.sha256sum:
69                 raise HashMismatchException('{0}: Does not match file already existing in the pool.'.format(hashed_file.filename))
70             return poolfile
71         except NoResultFound:
72             raise KeyError('{0} not found in database.'.format(poolname))
73
74     def _install_file(self, directory, hashed_file, archive, component, source_name):
75         """Install a file
76
77         Will not raise an error if the file is already present.
78
79         Returns:
80            `daklib.dbconn.PoolFile` object for the new file
81         """
82         session = self.session
83
84         poolname = os.path.join(utils.poolify(source_name), hashed_file.filename)
85         try:
86             poolfile = self.get_file(hashed_file, source_name)
87         except KeyError:
88             poolfile = PoolFile(filename=poolname, filesize=hashed_file.size)
89             poolfile.md5sum = hashed_file.md5sum
90             poolfile.sha1sum = hashed_file.sha1sum
91             poolfile.sha256sum = hashed_file.sha256sum
92             session.add(poolfile)
93             session.flush()
94
95         try:
96             session.query(ArchiveFile).filter_by(archive=archive, component=component, file=poolfile).one()
97         except NoResultFound:
98             archive_file = ArchiveFile(archive, component, poolfile)
99             session.add(archive_file)
100             session.flush()
101
102             path = os.path.join(archive.path, 'pool', component.component_name, poolname)
103             hashed_file_path = os.path.join(directory, hashed_file.filename)
104             self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)
105
106         return poolfile
107
108     def install_binary(self, directory, binary, suite, component, allow_tainted=False, fingerprint=None, source_suites=None, extra_source_archives=None):
109         """Install a binary package
110
111         Args:
112            directory (str): directory the binary package is located in
113            binary (daklib.upload.Binary): binary package to install
114            suite (daklib.dbconn.Suite): target suite
115            component (daklib.dbconn.Component): target component
116
117         Kwargs:
118            allow_tainted (bool): allow copying additional files from tainted archives
119            fingerprint (daklib.dbconn.Fingerprint): optional fingerprint
120            source_suites (list of daklib.dbconn.Suite or True): suites to copy
121               the source from if it is not already in `suite`, or True to allow
122               copying from any suite.
123               This can also be a SQLAlchemy (sub)query object.
124            extra_source_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from
125
126         Returns:
127            `daklib.dbconn.DBBinary` object for the new package
128         """
129         session = self.session
130         control = binary.control
131         maintainer = get_or_set_maintainer(control['Maintainer'], session)
132         architecture = get_architecture(control['Architecture'], session)
133
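        # Locate the source package this binary was built from: first look for it
        # in the target suite; if it is not there, fall back to `source_suites`
        # and copy it into the target suite and component below.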
134         (source_name, source_version) = binary.source
135         source_query = session.query(DBSource).filter_by(source=source_name, version=source_version)
136         source = source_query.filter(DBSource.suites.contains(suite)).first()
137         if source is None:
138             if source_suites != True:
139                 source_query = source_query.join(DBSource.suites) \
140                     .filter(Suite.suite_id == source_suites.c.id)
141             source = source_query.first()
142             if source is None:
143                 raise ArchiveException('{0}: trying to install to {1}, but could not find source'.format(binary.hashed_file.filename, suite.suite_name))
144             self.copy_source(source, suite, component)
145
146         db_file = self._install_file(directory, binary.hashed_file, suite.archive, component, source_name)
147
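        # `unique` identifies an existing binary row (package, version, architecture);
        # the attributes in `rest` must match that row if it already exists, otherwise
        # the upload conflicts with what is already in the database.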
148         unique = dict(
149             package=control['Package'],
150             version=control['Version'],
151             architecture=architecture,
152             )
153         rest = dict(
154             source=source,
155             maintainer=maintainer,
156             poolfile=db_file,
157             binarytype=binary.type,
158             fingerprint=fingerprint,
159             )
160
161         try:
162             db_binary = session.query(DBBinary).filter_by(**unique).one()
163             for key, value in rest.iteritems():
164                 if getattr(db_binary, key) != value:
165                     raise ArchiveException('{0}: Does not match binary in database.'.format(binary.hashed_file.filename))
166         except NoResultFound:
167             db_binary = DBBinary(**unique)
168             for key, value in rest.iteritems():
169                 setattr(db_binary, key, value)
170             session.add(db_binary)
171             session.flush()
172             import_metadata_into_db(db_binary, session)
173
174             self._add_built_using(db_binary, binary.hashed_file.filename, control, suite, extra_archives=extra_source_archives)
175
176         if suite not in db_binary.suites:
177             db_binary.suites.append(suite)
178
179         session.flush()
180
181         return db_binary
182
183     def _ensure_extra_source_exists(self, filename, source, archive, extra_archives=None):
184         """ensure source exists in the given archive
185
186         This is intended to be used to check that Built-Using sources exist.
187
188         Args:
189            filename (str): filename to use in error messages
190            source (daklib.dbconn.DBSource): source to look for
191            archive (daklib.dbconn.Archive): archive to look in
192
193         Kwargs:
194            extra_archives (list of daklib.dbconn.Archive): list of archives to copy
195                the source package from if it is not yet present in `archive`
196         """
197         session = self.session
198         db_file = session.query(ArchiveFile).filter_by(file=source.poolfile, archive=archive).first()
199         if db_file is not None:
200             return True
201
202         # Try to copy file from one extra archive
203         if extra_archives is None:
204             extra_archives = []
205         db_file = session.query(ArchiveFile).filter_by(file=source.poolfile).filter(ArchiveFile.archive_id.in_([ a.archive_id for a in extra_archives])).first()
206         if db_file is None:
207             raise ArchiveException('{0}: Built-Using refers to package {1} (= {2}) not in target archive {3}.'.format(filename, source.source, source.version, archive.archive_name))
208
209         source_archive = db_file.archive
210         for dsc_file in source.srcfiles:
211             af = session.query(ArchiveFile).filter_by(file=dsc_file.poolfile, archive=source_archive, component=db_file.component).one()
212             # We were given an explicit list of archives so it is okay to copy from tainted archives.
213             self._copy_file(af.file, archive, db_file.component, allow_tainted=True)
214
215     def _add_built_using(self, db_binary, filename, control, suite, extra_archives=None):
216         """Add Built-Using sources to `db_binary.extra_sources`
217         """
218         session = self.session
219         built_using = control.get('Built-Using', None)
220
221         if built_using is not None:
222             for dep in apt_pkg.parse_depends(built_using):
223                 assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
224                 bu_source_name, bu_source_version, comp = dep[0]
225                 assert comp == '=', 'Built-Using must contain strict dependencies'
226
227                 bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
228                 if bu_source is None:
229                     raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))
230
231                 self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)
232
233                 db_binary.extra_sources.append(bu_source)
234
235     def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
236         """Install a source package
237
238         Args:
239            directory (str): directory the source package is located in
240            source (daklib.upload.Source): source package to install
241            suite (daklib.dbconn.Suite): target suite
242            component (daklib.dbconn.Component): target component
243            changed_by (daklib.dbconn.Maintainer): person who prepared this version of the package
244
245         Kwargs:
246            allow_tainted (bool): allow copying additional files from tainted archives
247            fingerprint (daklib.dbconn.Fingerprint): optional fingerprint
248
249         Returns:
250            `daklib.dbconn.DBSource` object for the new source
251         """
252         session = self.session
253         archive = suite.archive
254         control = source.dsc
255         maintainer = get_or_set_maintainer(control['Maintainer'], session)
256         source_name = control['Source']
257
258         ### Add source package to database
259
260         # We need to install the .dsc first as the DBSource object refers to it.
261         db_file_dsc = self._install_file(directory, source._dsc_file, archive, component, source_name)
262
263         unique = dict(
264             source=source_name,
265             version=control['Version'],
266             )
267         rest = dict(
268             maintainer=maintainer,
269             changedby=changed_by,
270             #install_date=datetime.now().date(),
271             poolfile=db_file_dsc,
272             fingerprint=fingerprint,
273             dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
274             )
275
276         created = False
277         try:
278             db_source = session.query(DBSource).filter_by(**unique).one()
279             for key, value in rest.iteritems():
280                 if getattr(db_source, key) != value:
281                     raise ArchiveException('{0}: Does not match source in database.'.format(source._dsc_file.filename))
282         except NoResultFound:
283             created = True
284             db_source = DBSource(**unique)
285             for key, value in rest.iteritems():
286                 setattr(db_source, key, value)
287             # XXX: set as default in postgres?
288             db_source.install_date = datetime.now().date()
289             session.add(db_source)
290             session.flush()
291
292             # Add .dsc file. Other files will be added later.
293             db_dsc_file = DSCFile()
294             db_dsc_file.source = db_source
295             db_dsc_file.poolfile = db_file_dsc
296             session.add(db_dsc_file)
297             session.flush()
298
299         if suite in db_source.suites:
300             return db_source
301
302         db_source.suites.append(suite)
303
304         if not created:
305             return db_source
306
307         ### Now add remaining files and copy them to the archive.
308
309         for hashed_file in source.files.itervalues():
310             hashed_file_path = os.path.join(directory, hashed_file.filename)
311             if os.path.exists(hashed_file_path):
312                 db_file = self._install_file(directory, hashed_file, archive, component, source_name)
313                 session.add(db_file)
314             else:
315                 db_file = self.get_file(hashed_file, source_name)
316                 self._copy_file(db_file, archive, component, allow_tainted=allow_tainted)
317
318             db_dsc_file = DSCFile()
319             db_dsc_file.source = db_source
320             db_dsc_file.poolfile = db_file
321             session.add(db_dsc_file)
322
323         session.flush()
324
325         # Importing is safe as we only arrive here when we did not find the source already installed earlier.
326         import_metadata_into_db(db_source, session)
327
328         # Uploaders are the maintainer and co-maintainers from the Uploaders field
329         db_source.uploaders.append(maintainer)
330         if 'Uploaders' in control:
331             def split_uploaders(field):
332                 import re
333                 for u in re.sub(">[ ]*,", ">\t", field).split("\t"):
334                     yield u.strip()
335
336             for u in split_uploaders(control['Uploaders']):
337                 db_source.uploaders.append(get_or_set_maintainer(u, session))
338         session.flush()
339
340         return db_source
341
342     def _copy_file(self, db_file, archive, component, allow_tainted=False):
343         """Copy a file to the given archive and component
344
345         Args:
346            db_file (daklib.dbconn.PoolFile): file to copy
347            archive (daklib.dbconn.Archive): target archive
348            component (daklib.dbconn.Component): target component
349
350         Kwargs:
351            allow_tainted (bool): allow copying from tainted archives (such as NEW)
352         """
353         session = self.session
354
355         if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
356             query = session.query(ArchiveFile).filter_by(file=db_file, component=component)
357             if not allow_tainted:
358                 query = query.join(Archive).filter(Archive.tainted == False)
359
360             source_af = query.first()
361             if source_af is None:
362                 raise ArchiveException('cp: Could not find {0} in component {1} in any archive.'.format(db_file.filename, component.component_name))
363             target_af = ArchiveFile(archive, component, db_file)
364             session.add(target_af)
365             session.flush()
366             self.fs.copy(source_af.path, target_af.path, link=False, mode=archive.mode)
367
368     def copy_binary(self, db_binary, suite, component, allow_tainted=False, extra_archives=None):
369         """Copy a binary package to the given suite and component
370
371         Args:
372            db_binary (daklib.dbconn.DBBinary): binary to copy
373            suite (daklib.dbconn.Suite): target suite
374            component (daklib.dbconn.Component): target component
375
376         Kwargs:
377            allow_tainted (bool): allow copying from tainted archives (such as NEW)
378            extra_archives (list of daklib.dbconn.Archive): extra archives to copy Built-Using sources from
379         """
380         session = self.session
381         archive = suite.archive
382         if archive.tainted:
383             allow_tainted = True
384
385         # make sure built-using packages are present in target archive
386         filename = db_binary.poolfile.filename
387         for db_source in db_binary.extra_sources:
388             self._ensure_extra_source_exists(filename, db_source, archive, extra_archives=extra_archives)
389
390         # copy binary
391         db_file = db_binary.poolfile
392         self._copy_file(db_file, suite.archive, component, allow_tainted=allow_tainted)
393         if suite not in db_binary.suites:
394             db_binary.suites.append(suite)
395         self.session.flush()
396
397     def copy_source(self, db_source, suite, component, allow_tainted=False):
398         """Copy a source package to the given suite and component
399
400         Args:
401            db_source (daklib.dbconn.DBSource): source to copy
402            suite (daklib.dbconn.Suite): target suite
403            component (daklib.dbconn.Component): target component
404
405         Kwargs:
406            allow_tainted (bool): allow copying from tainted archives (such as NEW)
407         """
408         archive = suite.archive
409         if archive.tainted:
410             allow_tainted = True
411         for db_dsc_file in db_source.srcfiles:
412             self._copy_file(db_dsc_file.poolfile, archive, component, allow_tainted=allow_tainted)
413         if suite not in db_source.suites:
414             db_source.suites.append(suite)
415         self.session.flush()
416
417     def remove_file(self, db_file, archive, component):
418         """Remove a file from a given archive and component
419
420         Args:
421            db_file (daklib.dbconn.PoolFile): file to remove
422            archive (daklib.dbconn.Archive): archive to remove the file from
423            component (daklib.dbconn.Component): component to remove the file from
424         """
425         af = self.session.query(ArchiveFile).filter_by(file=db_file, archive=archive, component=component).one()
426         self.fs.unlink(af.path)
427         self.session.delete(af)
428
429     def remove_binary(self, binary, suite):
430         """Remove a binary from a given suite
431
432         Args:
433            binary (daklib.dbconn.DBBinary): binary to remove
434            suite (daklib.dbconn.Suite): suite to remove the package from
435         """
436         binary.suites.remove(suite)
437         self.session.flush()
438
439     def remove_source(self, source, suite):
440         """Remove a source from a given suite
441
442         Raises:
443            ArchiveException: source package is still referenced by other
444                              binaries in the suite
445
446         Args:
447            source (daklib.dbconn.DBSource): source to remove
448            suite (daklib.dbconn.Suite): suite to remove the package from
449         """
450         session = self.session
451
452         query = session.query(DBBinary).filter_by(source=source) \
453             .filter(DBBinary.suites.contains(suite))
454         if query.first() is not None:
455             raise ArchiveException('src:{0} is still used by binaries in suite {1}'.format(source.source, suite.suite_name))
456
457         source.suites.remove(suite)
458         session.flush()
459
460     def commit(self):
461         """commit changes"""
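        # Database changes are committed before filesystem changes.  The finally
        # block always rolls both back afterwards: that is what undoes everything
        # if either commit raises, and it is assumed to be harmless once both
        # commits have succeeded.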
462         try:
463             self.session.commit()
464             self.fs.commit()
465         finally:
466             self.session.rollback()
467             self.fs.rollback()
468
469     def rollback(self):
470         """rollback changes"""
471         self.session.rollback()
472         self.fs.rollback()
473
474     def __enter__(self):
475         return self
476
477     def __exit__(self, type, value, traceback):
478         if type is None:
479             self.commit()
480         else:
481             self.rollback()
482         return None
483
484 class ArchiveUpload(object):
485     """handle an upload
486
487     This class can be used in a with-statement:
488
489        with ArchiveUpload(...) as upload:
490           ...
491
492     Doing so will automatically run any required cleanup and also roll back the
493     transaction if it was not committed.
494
495     Attributes:
496        changes (daklib.upload.Changes): upload to process
497        directory (str): directory with temporary copy of files. Set by `prepare`
498        fingerprint (daklib.dbconn.Fingerprint): fingerprint used to sign the upload
499        new (bool): upload is NEW. Set by `check`
500        reject_reasons (list of str): reasons why the upload cannot be accepted
501        session: database session
502        transaction (daklib.archive.ArchiveTransaction): transaction used to handle the upload
503        warnings (list of str): warnings (NOT USED YET)
504     """
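    # A rough sketch of the intended call sequence, inferred from the method
    # docstrings below (not code from the original file):
    #
    #     with ArchiveUpload(directory, changes, keyrings) as upload:
    #         if upload.check():
    #             if upload.new:
    #                 upload.install_to_new()
    #             else:
    #                 upload.install()
    #             upload.commit()
    #         else:
    #             reasons = upload.reject_reasons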
505     def __init__(self, directory, changes, keyrings):
506         self.transaction = ArchiveTransaction()
507         self.session = self.transaction.session
508
509         self.original_directory = directory
510         self.original_changes = changes
511         self.changes = None
512         self.directory = None
513         self.keyrings = keyrings
514
515         self.fingerprint = self.session.query(Fingerprint).filter_by(fingerprint=changes.primary_fingerprint).one()
516
517         self.reject_reasons = []
518         self.warnings = []
519         self.final_suites = None
520         self.new = False
521
522         self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
523         self._new = self._new_queue.suite
524
525     def prepare(self):
526         """prepare upload for further processing
527
528         This copies the files involved to a temporary directory.  If you use
529         this method directly, you have to remove the directory given by the
530         `directory` attribute later on your own.
531
532         Instead of using the method directly, you can also use a with-statement:
533
534            with ArchiveUpload(...) as upload:
535               ...
536
537         This will automatically handle any required cleanup.
538         """
539         assert self.directory is None
540         assert self.original_changes.valid_signature
541
542         cnf = Config()
543         session = self.transaction.session
544
545         self.directory = tempfile.mkdtemp(dir=cnf.get('Dir::TempPath'))
546         with FilesystemTransaction() as fs:
547             src = os.path.join(self.original_directory, self.original_changes.filename)
548             dst = os.path.join(self.directory, self.original_changes.filename)
549             fs.copy(src, dst)
550
551             self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
552
553             for f in self.changes.files.itervalues():
554                 src = os.path.join(self.original_directory, f.filename)
555                 dst = os.path.join(self.directory, f.filename)
556                 fs.copy(src, dst)
557
558             source = self.changes.source
559             if source is not None:
560                 for f in source.files.itervalues():
561                     src = os.path.join(self.original_directory, f.filename)
562                     dst = os.path.join(self.directory, f.filename)
563                     if f.filename not in self.changes.files:
564                         db_file = self.transaction.get_file(f, source.dsc['Source'])
565                         db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
566                         fs.copy(db_archive_file.path, dst, symlink=True)
567
568     def unpacked_source(self):
569         """Path to unpacked source
570
571         Get path to the unpacked source. This method unpacks the source
572         into a temporary directory under `self.directory` if that has not
573         already been done.
574
575         Returns:
576            String giving the path to the unpacked source directory
577            or None if no source was included in the upload.
578         """
579         assert self.directory is not None
580
581         source = self.changes.source
582         if source is None:
583             return None
584         dsc_path = os.path.join(self.directory, source._dsc_file.filename)
585
586         sourcedir = os.path.join(self.directory, 'source')
587         if not os.path.exists(sourcedir):
588             subprocess.check_call(["dpkg-source", "--no-copy", "-x", dsc_path, sourcedir], shell=False)
589         if not os.path.isdir(sourcedir):
590             raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
591         return sourcedir
592
593     def _map_suite(self, suite_name):
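        # SuiteMappings rules handled here, as read from the configuration:
        #   "map <src> <dst>" / "silent-map <src> <dst>": rename the target suite,
        #       with or without a warning
        #   "ignore <suite>": drop the target suite and warn
        #   "reject <suite>": refuse the upload
        # ("propup-version" and "map-unreleased" are not implemented, see below.)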
594         for rule in Config().value_list("SuiteMappings"):
595             fields = rule.split()
596             rtype = fields[0]
597             if rtype == "map" or rtype == "silent-map":
598                 (src, dst) = fields[1:3]
599                 if src == suite_name:
600                     suite_name = dst
601                     if rtype != "silent-map":
602                         self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
603             elif rtype == "ignore":
604                 ignored = fields[1]
605                 if suite_name == ignored:
606                     self.warnings.append('Ignoring target suite {0}.'.format(ignored))
607                     suite_name = None
608             elif rtype == "reject":
609                 rejected = fields[1]
610                 if suite_name == rejected:
611                     self.reject_reasons.append('Uploads to {0} are not accepted.'.format(rejected))
612             ## XXX: propup-version and map-unreleased not yet implemented
613         return suite_name
614
615     def _mapped_suites(self):
616         """Get target suites after mappings
617
618         Returns:
619            list of daklib.dbconn.Suite giving the mapped target suites of this upload
620         """
621         session = self.session
622
623         suite_names = []
624         for dist in self.changes.distributions:
625             suite_name = self._map_suite(dist)
626             if suite_name is not None:
627                 suite_names.append(suite_name)
628
629         suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
630         return suites
631
632     def _check_new(self, suite):
633         """Check if upload is NEW
634
635         An upload is NEW if it has binary or source packages that do not have
636         an override in `suite` OR if it references files ONLY in a tainted
637         archive (eg. when it references files in NEW).
638
639         Returns:
640            True if the upload is NEW, False otherwise
641         """
642         session = self.session
643
644         # Check for missing overrides
645         for b in self.changes.binaries:
646             override = self._binary_override(suite, b)
647             if override is None:
648                 return True
649
650         if self.changes.source is not None:
651             override = self._source_override(suite, self.changes.source)
652             if override is None:
653                 return True
654
655         # Check if we reference a file only in a tainted archive
656         files = self.changes.files.values()
657         if self.changes.source is not None:
658             files.extend(self.changes.source.files.values())
659         for f in files:
660             query = session.query(ArchiveFile).join(PoolFile).filter(PoolFile.sha1sum == f.sha1sum)
661             query_untainted = query.join(Archive).filter(Archive.tainted == False)
662
663             in_archive = (query.first() is not None)
664             in_untainted_archive = (query_untainted.first() is not None)
665
666             if in_archive and not in_untainted_archive:
667                 return True
668
669     def _final_suites(self):
670         session = self.session
671
672         mapped_suites = self._mapped_suites()
673         final_suites = set()
674
675         for suite in mapped_suites:
676             overridesuite = suite
677             if suite.overridesuite is not None:
678                 overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
679             if self._check_new(overridesuite):
680                 self.new = True
681             final_suites.add(suite)
682
683         return final_suites
684
685     def _binary_override(self, suite, binary):
686         """Get override entry for a binary
687
688         Args:
689            suite (daklib.dbconn.Suite)
690            binary (daklib.upload.Binary)
691
692         Returns:
693            daklib.dbconn.Override or None
694         """
695         if suite.overridesuite is not None:
696             suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
697
698         query = self.session.query(Override).filter_by(suite=suite, package=binary.control['Package']) \
699                 .join(Component).filter(Component.component_name == binary.component) \
700                 .join(OverrideType).filter(OverrideType.overridetype == binary.type)
701
702         try:
703             return query.one()
704         except NoResultFound:
705             return None
706
707     def _source_override(self, suite, source):
708         """Get override entry for a source
709
710         Args:
711            suite (daklib.dbconn.Suite)
712            source (daklib.upload.Source)
713
714         Returns:
715            daklib.dbconn.Override or None
716         """
717         if suite.overridesuite is not None:
718             suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
719
720         # XXX: component for source?
721         query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
722                 .join(OverrideType).filter(OverrideType.overridetype == 'dsc')
723
724         try:
725             return query.one()
726         except NoResultFound:
727             return None
728
729     def check(self, force=False):
730         """run checks against the upload
731
732         Args:
733            force (bool): ignore failing forcible checks
734
735         Returns:
736            True if all checks passed, False otherwise
737         """
738         # XXX: needs to be better structured.
739         assert self.changes.valid_signature
740
741         try:
742             for chk in (
743                     checks.SignatureCheck,
744                     checks.ChangesCheck,
745                     checks.HashesCheck,
746                     checks.SourceCheck,
747                     checks.BinaryCheck,
748                     checks.ACLCheck,
749                     checks.SingleDistributionCheck,
750                     checks.NoSourceOnlyCheck,
751                     checks.LintianCheck,
752                     ):
753                 chk().check(self)
754
755             final_suites = self._final_suites()
756             if len(final_suites) == 0:
757                 self.reject_reasons.append('Ended with no suite to install to.')
758                 return False
759
760             for chk in (
761                     checks.SourceFormatCheck,
762                     checks.SuiteArchitectureCheck,
763                     checks.VersionCheck,
764                     ):
765                 for suite in final_suites:
766                     chk().per_suite_check(self, suite)
767
768             if len(self.reject_reasons) != 0:
769                 return False
770
771             self.final_suites = final_suites
772             return True
773         except checks.Reject as e:
774             self.reject_reasons.append(unicode(e))
775         except Exception as e:
776             self.reject_reasons.append("Processing raised an exception: {0}.\n{1}".format(e, traceback.format_exc()))
777         return False
778
779     def _install_to_suite(self, suite, source_component_func, binary_component_func, source_suites=None, extra_source_archives=None):
780         """Install upload to the given suite
781
782         Args:
783            suite (daklib.dbconn.Suite): suite to install the package into.
784               This is the real suite, i.e. after any redirection to NEW or a policy queue
785            source_component_func: function to get the `daklib.dbconn.Component`
786               for a `daklib.upload.Source` object
787            binary_component_func: function to get the `daklib.dbconn.Component`
788               for a `daklib.upload.Binary` object
789
790         Kwargs:
791            source_suites: see `daklib.archive.ArchiveTransaction.install_binary`
792            extra_source_archives: see `daklib.archive.ArchiveTransaction.install_binary`
793
794         Returns:
795            tuple with two elements. The first is a `daklib.dbconn.DBSource`
796            object for the installed source or None if no source was included.
797            The second is a list of `daklib.dbconn.DBBinary` objects for the
798            installed binary packages.
799         """
800         # XXX: move this function to ArchiveTransaction?
801
802         control = self.changes.changes
803         changed_by = get_or_set_maintainer(control.get('Changed-By', control['Maintainer']), self.session)
804
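        # Unless the caller restricts `source_suites`, allow binaries to take their
        # source from any suite that the target suite references in its version
        # checks (see the VersionCheck-based subquery below).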
805         if source_suites is None:
806             source_suites = self.session.query(Suite).join((VersionCheck, VersionCheck.reference_id == Suite.suite_id)).filter(VersionCheck.suite == suite).subquery()
807
808         source = self.changes.source
809         if source is not None:
810             component = source_component_func(source)
811             db_source = self.transaction.install_source(self.directory, source, suite, component, changed_by, fingerprint=self.fingerprint)
812         else:
813             db_source = None
814
815         db_binaries = []
816         for binary in self.changes.binaries:
817             component = binary_component_func(binary)
818             db_binary = self.transaction.install_binary(self.directory, binary, suite, component, fingerprint=self.fingerprint, source_suites=source_suites, extra_source_archives=extra_source_archives)
819             db_binaries.append(db_binary)
820
821         if suite.copychanges:
822             src = os.path.join(self.directory, self.changes.filename)
823             dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
824             self.transaction.fs.copy(src, dst)
825
826         return (db_source, db_binaries)
827
828     def _install_changes(self):
829         assert self.changes.valid_signature
830         control = self.changes.changes
831         session = self.transaction.session
832         config = Config()
833
834         changelog_id = None
835         # Only add changelog for sourceful uploads and binNMUs
836         if 'source' in self.changes.architectures or re_bin_only_nmu.search(control['Version']):
837             query = 'INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id'
838             changelog_id = session.execute(query, {'changelog': control['Changes']}).scalar()
839             assert changelog_id is not None
840
841         db_changes = DBChange()
842         db_changes.changesname = self.changes.filename
843         db_changes.source = control['Source']
844         db_changes.binaries = control.get('Binary', None)
845         db_changes.architecture = control['Architecture']
846         db_changes.version = control['Version']
847         db_changes.distribution = control['Distribution']
848         db_changes.urgency = control['Urgency']
849         db_changes.maintainer = control['Maintainer']
850         db_changes.changedby = control.get('Changed-By', control['Maintainer'])
851         db_changes.date = control['Date']
852         db_changes.fingerprint = self.fingerprint.fingerprint
853         db_changes.changelog_id = changelog_id
854         db_changes.closes = self.changes.closed_bugs
855
856         self.transaction.session.add(db_changes)
857         self.transaction.session.flush()
858
859         return db_changes
860
861     def _install_policy(self, policy_queue, target_suite, db_changes, db_source, db_binaries):
862         u = PolicyQueueUpload()
863         u.policy_queue = policy_queue
864         u.target_suite = target_suite
865         u.changes = db_changes
866         u.source = db_source
867         u.binaries = db_binaries
868         self.transaction.session.add(u)
869         self.transaction.session.flush()
870
871         dst = os.path.join(policy_queue.path, self.changes.filename)
872         self.transaction.fs.copy(self.changes.path, dst)
873
874         return u
875
876     def try_autobyhand(self):
877         """Try AUTOBYHAND
878
879         Try to handle byhand packages automatically.
880
881         Returns:
882            True if all byhand files were processed automatically, False otherwise
883         """
884         assert len(self.reject_reasons) == 0
885         assert self.changes.valid_signature
886         assert self.final_suites is not None
887
888         byhand = self.changes.byhand_files
889         if len(byhand) == 0:
890             return True
891
892         suites = list(self.final_suites)
893         assert len(suites) == 1, "BYHAND uploads must be to a single suite"
894         suite = suites[0]
895
896         cnf = Config()
897         control = self.changes.changes
898         automatic_byhand_packages = cnf.subtree("AutomaticByHandPackages")
899
900         remaining = []
901         for f in byhand:
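            # BYHAND filenames are expected to look like <package>_<version>_<arch>.<extension>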
902             package, version, archext = f.filename.split('_', 2)
903             arch, ext = archext.split('.', 1)
904
905             rule = automatic_byhand_packages.get(package)
906             if rule is None:
907                 remaining.append(f)
908                 continue
909
910             if rule['Source'] != control['Source'] or rule['Section'] != f.section or rule['Extension'] != ext:
911                 remaining.append(f)
912                 continue
913
914             script = rule['Script']
915             retcode = subprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename)], shell=False)
916             if retcode != 0:
917                 print "W: error processing {0}.".format(f.filename)
918                 remaining.append(f)
919
920         return len(remaining) == 0
921
922     def _install_byhand(self, policy_queue_upload, hashed_file):
923         """
924         Args:
925            policy_queue_upload (daklib.dbconn.PolicyQueueUpload): XXX
926            hashed_file (daklib.upload.HashedFile): XXX
927         """
928         fs = self.transaction.fs
929         session = self.transaction.session
930         policy_queue = policy_queue_upload.policy_queue
931
932         byhand_file = PolicyQueueByhandFile()
933         byhand_file.upload = policy_queue_upload
934         byhand_file.filename = hashed_file.filename
935         session.add(byhand_file)
936         session.flush()
937
938         src = os.path.join(self.directory, hashed_file.filename)
939         dst = os.path.join(policy_queue.path, hashed_file.filename)
940         fs.copy(src, dst)
941
942         return byhand_file
943
944     def _do_bts_versiontracking(self):
945         cnf = Config()
946         fs = self.transaction.fs
947
948         btsdir = cnf.get('Dir::BTSVersionTrack')
949         if btsdir is None or btsdir == '':
950             return
951
952         base = os.path.join(btsdir, self.changes.filename[:-8])
953
954         # version history
955         sourcedir = self.unpacked_source()
956         if sourcedir is not None:
957             fh = open(os.path.join(sourcedir, 'debian', 'changelog'), 'r')
958             versions = fs.create("{0}.versions".format(base), mode=0o644)
959             for line in fh.readlines():
960                 if re_changelog_versions.match(line):
961                     versions.write(line)
962             fh.close()
963             versions.close()
964
965         # binary -> source mapping
966         debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
967         for binary in self.changes.binaries:
968             control = binary.control
969             source_package, source_version = binary.source
970             line = " ".join([control['Package'], control['Version'], source_package, source_version])
971             print >>debinfo, line
972         debinfo.close()
973
974     def install(self):
975         """install upload
976
977         Install upload to a suite or policy queue.  This method does *not*
978         handle uploads to NEW.
979
980         You need to have called the `check` method before calling this method.
981         """
982         assert len(self.reject_reasons) == 0
983         assert self.changes.valid_signature
984         assert self.final_suites is not None
985         assert not self.new
986
987         db_changes = self._install_changes()
988
989         for suite in self.final_suites:
990             overridesuite = suite
991             if suite.overridesuite is not None:
992                 overridesuite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
993
994             redirected_suite = suite
995             if suite.policy_queue is not None:
996                 redirected_suite = suite.policy_queue.suite
997
998             source_component_func = lambda source: self._source_override(overridesuite, source).component
999             binary_component_func = lambda binary: self._binary_override(overridesuite, binary).component
1000
1001             (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])
1002
1003             if suite.policy_queue is not None:
1004                 self._install_policy(suite.policy_queue, suite, db_changes, db_source, db_binaries)
1005
1006             # copy to build queues
1007             if suite.policy_queue is None or suite.policy_queue.send_to_build_queues:
1008                 for build_queue in suite.copy_queues:
1009                     self._install_to_suite(build_queue.suite, source_component_func, binary_component_func, extra_source_archives=[suite.archive])
1010
1011         self._do_bts_versiontracking()
1012
1013     def install_to_new(self):
1014         """install upload to NEW
1015
1016         Install upload to NEW.  This method does *not* handle regular uploads
1017         to suites or policy queues.
1018
1019         You need to have called the `check` method before calling this method.
1020         """
1021         # Uploads to NEW are special as we don't have overrides.
1022         assert len(self.reject_reasons) == 0
1023         assert self.changes.valid_signature
1024         assert self.final_suites is not None
1025
1026         source = self.changes.source
1027         binaries = self.changes.binaries
1028         byhand = self.changes.byhand_files
1029
1030         new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
1031         if len(byhand) > 0:
1032             new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
1033         new_suite = new_queue.suite
1034
1035         # we need a suite to guess components
1036         suites = list(self.final_suites)
1037         assert len(suites) == 1, "NEW uploads must be to a single suite"
1038         suite = suites[0]
1039
1040         def binary_component_func(binary):
1041             override = self._binary_override(suite, binary)
1042             if override is not None:
1043                 return override.component
1044             component_name = binary.component
1045             component = self.session.query(Component).filter_by(component_name=component_name).one()
1046             return component
1047
1048         # guess source component
1049         # XXX: should be moved into an extra method
1050         binary_component_names = set()
1051         for binary in binaries:
1052             component = binary_component_func(binary)
1053             binary_component_names.add(component.component_name)
1054         source_component_name = None
1055         for guess in ('main', 'contrib', 'non-free'):
1056             if guess in binary_component_names:
1057                 source_component_name = guess
1058                 break
1059         if source_component_name is None:
1060             raise Exception('Could not guess source component.')
1061         source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
1062         source_component_func = lambda source: source_component
1063
1064         db_changes = self._install_changes()
1065         (db_source, db_binaries) = self._install_to_suite(new_suite, source_component_func, binary_component_func, source_suites=True, extra_source_archives=[suite.archive])
1066         policy_upload = self._install_policy(new_queue, suite, db_changes, db_source, db_binaries)
1067
1068         for f in byhand:
1069             self._install_byhand(policy_upload, f)
1070
1071         self._do_bts_versiontracking()
1072
1073     def commit(self):
1074         """commit changes"""
1075         self.transaction.commit()
1076
1077     def rollback(self):
1078         """rollback changes"""
1079         self.transaction.rollback()
1080
1081     def __enter__(self):
1082         self.prepare()
1083         return self
1084
1085     def __exit__(self, type, value, traceback):
1086         if self.directory is not None:
1087             shutil.rmtree(self.directory)
1088             self.directory = None
1089         self.changes = None
1090         self.transaction.rollback()
1091         return None