Add by-hash support
diff --git a/daklib/archive.py b/daklib/archive.py
index 81719a8731871ef2a4e24903328f518d80774f57..bfab2c64e832ba63f64806a633bb0fc177cbc8b7 100644
--- a/daklib/archive.py
+++ b/daklib/archive.py
@@ -26,13 +26,14 @@ import daklib.upload as upload
 import daklib.utils as utils
 from daklib.fstransactions import FilesystemTransaction
 from daklib.regexes import re_changelog_versions, re_bin_only_nmu
+import daklib.daksubprocess
 
 import apt_pkg
 from datetime import datetime
 import os
 import shutil
-import subprocess
 from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy.orm import object_session
 import sqlalchemy.exc
 import tempfile
 import traceback
@@ -109,7 +110,7 @@ class ArchiveTransaction(object):
             session.flush()
 
             path = os.path.join(archive.path, 'pool', component.component_name, poolname)
-            hashed_file_path = os.path.join(directory, hashed_file.filename)
+            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
             self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)
 
         return poolfile
@@ -175,6 +176,10 @@ class ArchiveTransaction(object):
             maintainer=maintainer,
             poolfile=db_file,
             binarytype=binary.type,
+            )
+        # Other attributes that are ignored for purposes of equality with
+        # an existing binary
+        rest2 = dict(
             fingerprint=fingerprint,
             )
 
@@ -187,6 +192,8 @@ class ArchiveTransaction(object):
             db_binary = DBBinary(**unique)
             for key, value in rest.iteritems():
                 setattr(db_binary, key, value)
+            for key, value in rest2.iteritems():
+                setattr(db_binary, key, value)
             session.add(db_binary)
             session.flush()
             import_metadata_into_db(db_binary, session)
@@ -240,51 +247,18 @@ class ArchiveTransaction(object):
         """Add Built-Using sources to C{db_binary.extra_sources}
         """
         session = self.session
-        built_using = control.get('Built-Using', None)
-
-        if built_using is not None:
-            for dep in apt_pkg.parse_depends(built_using):
-                assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
-                bu_source_name, bu_source_version, comp = dep[0]
-                assert comp == '=', 'Built-Using must contain strict dependencies'
-
-                bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
-                if bu_source is None:
-                    raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))
-
-                self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)
-
-                db_binary.extra_sources.append(bu_source)
-
-    def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
-        """Install a source package
-
-        @type  directory: str
-        @param directory: directory the source package is located in
-
-        @type  source: L{daklib.upload.Source}
-        @param source: source package to install
-
-        @type  suite: L{daklib.dbconn.Suite}
-        @param suite: target suite
-
-        @type  component: L{daklib.dbconn.Component}
-        @param component: target component
 
-        @type  changed_by: L{daklib.dbconn.Maintainer}
-        @param changed_by: person who prepared this version of the package
+        for bu_source_name, bu_source_version in daklib.utils.parse_built_using(control):
+            bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
+            if bu_source is None:
+                raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))
 
-        @type  allow_tainted: bool
-        @param allow_tainted: allow to copy additional files from tainted archives
+            self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)
 
-        @type  fingerprint: L{daklib.dbconn.Fingerprint}
-        @param fingerprint: optional fingerprint
+            db_binary.extra_sources.append(bu_source)
 
-        @rtype:  L{daklib.dbconn.DBSource}
-        @return: database object for the new source
-        """
+    def install_source_to_archive(self, directory, source, archive, component, changed_by, allow_tainted=False, fingerprint=None):
         session = self.session
-        archive = suite.archive
         control = source.dsc
         maintainer = get_or_set_maintainer(control['Maintainer'], session)
         source_name = control['Source']
@@ -300,12 +274,15 @@ class ArchiveTransaction(object):
             )
         rest = dict(
             maintainer=maintainer,
-            changedby=changed_by,
-            #install_date=datetime.now().date(),
             poolfile=db_file_dsc,
-            fingerprint=fingerprint,
             dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
             )
+        # Other attributes that are ignored for purposes of equality with
+        # an existing source
+        rest2 = dict(
+            changedby=changed_by,
+            fingerprint=fingerprint,
+            )
 
         created = False
         try:
@@ -318,8 +295,8 @@ class ArchiveTransaction(object):
             db_source = DBSource(**unique)
             for key, value in rest.iteritems():
                 setattr(db_source, key, value)
-            # XXX: set as default in postgres?
-            db_source.install_date = datetime.now().date()
+            for key, value in rest2.iteritems():
+                setattr(db_source, key, value)
             session.add(db_source)
             session.flush()
 
@@ -330,11 +307,6 @@ class ArchiveTransaction(object):
             session.add(db_dsc_file)
             session.flush()
 
-        if suite in db_source.suites:
-            return db_source
-
-        db_source.suites.append(suite)
-
         if not created:
             for f in db_source.srcfiles:
                 self._copy_file(f.poolfile, archive, component, allow_tainted=allow_tainted)
@@ -343,7 +315,7 @@ class ArchiveTransaction(object):
         ### Now add remaining files and copy them to the archive.
 
         for hashed_file in source.files.itervalues():
-            hashed_file_path = os.path.join(directory, hashed_file.filename)
+            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
             if os.path.exists(hashed_file_path):
                 db_file = self._install_file(directory, hashed_file, archive, component, source_name)
                 session.add(db_file)
@@ -371,6 +343,42 @@ class ArchiveTransaction(object):
 
         return db_source
 
+    def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
+        """Install a source package
+
+        @type  directory: str
+        @param directory: directory the source package is located in
+
+        @type  source: L{daklib.upload.Source}
+        @param source: source package to install
+
+        @type  suite: L{daklib.dbconn.Suite}
+        @param suite: target suite
+
+        @type  component: L{daklib.dbconn.Component}
+        @param component: target component
+
+        @type  changed_by: L{daklib.dbconn.Maintainer}
+        @param changed_by: person who prepared this version of the package
+
+        @type  allow_tainted: bool
+        @param allow_tainted: allow to copy additional files from tainted archives
+
+        @type  fingerprint: L{daklib.dbconn.Fingerprint}
+        @param fingerprint: optional fingerprint
+
+        @rtype:  L{daklib.dbconn.DBSource}
+        @return: database object for the new source
+        """
+        db_source = self.install_source_to_archive(directory, source, suite.archive, component, changed_by, allow_tainted, fingerprint)
+
+        if suite in db_source.suites:
+            return db_source
+        db_source.suites.append(suite)
+        self.session.flush()
+
+        return db_source
+
     def _copy_file(self, db_file, archive, component, allow_tainted=False):
         """Copy a file to the given archive and component
 
@@ -389,13 +397,13 @@ class ArchiveTransaction(object):
         session = self.session
 
         if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
-            query = session.query(ArchiveFile).filter_by(file=db_file, component=component)
+            query = session.query(ArchiveFile).filter_by(file=db_file)
             if not allow_tainted:
                 query = query.join(Archive).filter(Archive.tainted == False)
 
             source_af = query.first()
             if source_af is None:
-                raise ArchiveException('cp: Could not find {0} in component {1} in any archive.'.format(db_file.filename, component.component_name))
+                raise ArchiveException('cp: Could not find {0} in any archive.'.format(db_file.filename))
             target_af = ArchiveFile(archive, component, db_file)
             session.add(target_af)
             session.flush()
@@ -530,6 +538,9 @@ class ArchiveTransaction(object):
         self.session.rollback()
         self.fs.rollback()
 
+    def flush(self):
+        self.session.flush()
+
     def __enter__(self):
         return self
 
@@ -540,6 +551,34 @@ class ArchiveTransaction(object):
             self.rollback()
         return None
 
+def source_component_from_package_list(package_list, suite):
+    """Get component for a source package
+
+    This function will look at the Package-List field to determine the
+    component the source package belongs to. This is the first component
+    the source package provides binaries for (first with respect to the
+    ordering of components).
+
+    If the source package has no Package-List field, None is returned.
+
+    @type  package_list: L{daklib.packagelist.PackageList}
+    @param package_list: package list of the source to get the override for
+
+    @type  suite: L{daklib.dbconn.Suite}
+    @param suite: suite to consider for binaries produced
+
+    @rtype:  L{daklib.dbconn.Component} or C{None}
+    @return: component for the given source or C{None}
+    """
+    if package_list.fallback:
+        return None
+    session = object_session(suite)
+    packages = package_list.packages_for_suite(suite)
+    components = set(p.component for p in packages)
+    query = session.query(Component).order_by(Component.ordering) \
+            .filter(Component.component_name.in_(components))
+    return query.first()
+
 class ArchiveUpload(object):
     """handle an upload
 
@@ -695,7 +734,7 @@ class ArchiveUpload(object):
         sourcedir = os.path.join(self.directory, 'source')
         if not os.path.exists(sourcedir):
             devnull = open('/dev/null', 'w')
-            subprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=devnull)
+            daklib.daksubprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=devnull)
         if not os.path.isdir(sourcedir):
             raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
         return sourcedir
@@ -739,12 +778,39 @@ class ArchiveUpload(object):
         suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
         return suites
 
-    def _check_new(self, suite):
+    def _check_new_binary_overrides(self, suite, overridesuite):
+        new = False
+        source = self.changes.source
+
+        if source is not None and not source.package_list.fallback:
+            packages = source.package_list.packages_for_suite(suite)
+            binaries = [ entry for entry in packages ]
+            for b in binaries:
+                override = self._binary_override(overridesuite, b)
+                if override is None:
+                    self.warnings.append('binary:{0} is NEW.'.format(b.name))
+                    new = True
+        else:
+            binaries = self.changes.binaries
+            for b in binaries:
+                if utils.is_in_debug_section(b.control) and suite.debug_suite is not None:
+                    continue
+                override = self._binary_override(overridesuite, b)
+                if override is None:
+                    self.warnings.append('binary:{0} is NEW.'.format(b.name))
+                    new = True
+
+        return new
+
+    def _check_new(self, suite, overridesuite):
         """Check if upload is NEW
 
         An upload is NEW if it has binary or source packages that do not have
-        an override in C{suite} OR if it references files ONLY in a tainted
-        archive (eg. when it references files in NEW).
+        an override in C{overridesuite} OR if it references files ONLY in a
+        tainted archive (eg. when it references files in NEW).
+
+        Debug packages (*-dbgsym in Section: debug) are not considered as NEW
+        if C{suite} has a separate debug suite.
 
         @rtype:  bool
         @return: C{True} if the upload is NEW, C{False} otherwise
@@ -753,14 +819,10 @@ class ArchiveUpload(object):
         new = False
 
         # Check for missing overrides
-        for b in self.changes.binaries:
-            override = self._binary_override(suite, b)
-            if override is None:
-                self.warnings.append('binary:{0} is NEW.'.format(b.control['Package']))
-                new = True
-
+        if self._check_new_binary_overrides(suite, overridesuite):
+            new = True
         if self.changes.source is not None:
-            override = self._source_override(suite, self.changes.source)
+            override = self._source_override(overridesuite, self.changes.source)
             if override is None:
                 self.warnings.append('source:{0} is NEW.'.format(self.changes.source.dsc['Source']))
                 new = True
@@ -792,7 +854,7 @@ class ArchiveUpload(object):
             overridesuite = suite
             if suite.overridesuite is not None:
                 overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
-            if self._check_new(overridesuite):
+            if self._check_new(suite, overridesuite):
                 self.new = True
             final_suites.add(suite)
 
@@ -804,7 +866,7 @@ class ArchiveUpload(object):
         @type  suite: L{daklib.dbconn.Suite}
         @param suite: suite to get override for
 
-        @type  binary: L{daklib.upload.Binary}
+        @type  binary: L{daklib.upload.Binary} or L{daklib.packagelist.PackageListEntry}
         @param binary: binary to get override for
 
         @rtype:  L{daklib.dbconn.Override} or C{None}
@@ -817,7 +879,7 @@ class ArchiveUpload(object):
         if mapped_component is None:
             return None
 
-        query = self.session.query(Override).filter_by(suite=suite, package=binary.control['Package']) \
+        query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \
                 .join(Component).filter(Component.component_name == mapped_component.component_name) \
                 .join(OverrideType).filter(OverrideType.overridetype == binary.type)
 
@@ -841,10 +903,13 @@ class ArchiveUpload(object):
         if suite.overridesuite is not None:
             suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
 
-        # XXX: component for source?
         query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
                 .join(OverrideType).filter(OverrideType.overridetype == 'dsc')
 
+        component = source_component_from_package_list(source.package_list, suite)
+        if component is not None:
+            query = query.filter(Override.component == component)
+
         try:
             return query.one()
         except NoResultFound:
@@ -889,12 +954,14 @@ class ArchiveUpload(object):
             # Validate signatures and hashes before we do any real work:
             for chk in (
                     checks.SignatureAndHashesCheck,
+                    checks.SignatureTimestampCheck,
                     checks.ChangesCheck,
                     checks.ExternalHashesCheck,
                     checks.SourceCheck,
                     checks.BinaryCheck,
                     checks.BinaryTimestampCheck,
                     checks.SingleDistributionCheck,
+                    checks.ArchAllBinNMUCheck,
                     ):
                 chk().check(self)
 
@@ -914,6 +981,7 @@ class ArchiveUpload(object):
                 chk().check(self)
 
             for chk in (
+                    checks.SuiteCheck,
                     checks.ACLCheck,
                     checks.SourceFormatCheck,
                     checks.SuiteArchitectureCheck,
@@ -966,14 +1034,33 @@ class ArchiveUpload(object):
         source = self.changes.source
         if source is not None:
             component = source_component_func(source)
-            db_source = self.transaction.install_source(self.directory, source, suite, component, changed_by, fingerprint=self.fingerprint)
+            db_source = self.transaction.install_source(
+                self.directory,
+                source,
+                suite,
+                component,
+                changed_by,
+                fingerprint=self.fingerprint
+            )
         else:
             db_source = None
 
         db_binaries = []
         for binary in self.changes.binaries:
+            copy_to_suite = suite
+            if utils.is_in_debug_section(binary.control) and suite.debug_suite is not None:
+                copy_to_suite = suite.debug_suite
+
             component = binary_component_func(binary)
-            db_binary = self.transaction.install_binary(self.directory, binary, suite, component, fingerprint=self.fingerprint, source_suites=source_suites, extra_source_archives=extra_source_archives)
+            db_binary = self.transaction.install_binary(
+                self.directory,
+                binary,
+                copy_to_suite,
+                component,
+                fingerprint=self.fingerprint,
+                source_suites=source_suites,
+                extra_source_archives=extra_source_archives
+            )
             db_binaries.append(db_binary)
 
         if suite.copychanges:
@@ -981,6 +1068,8 @@ class ArchiveUpload(object):
             dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
             self.transaction.fs.copy(src, dst, mode=suite.archive.mode)
 
+        suite.update_last_changed()
+
         return (db_source, db_binaries)
 
     def _install_changes(self):
@@ -1061,14 +1150,26 @@ class ArchiveUpload(object):
 
         remaining = []
         for f in byhand:
-            parts = f.filename.split('_', 2)
-            if len(parts) != 3:
-                print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
-                remaining.append(f)
-                continue
+            if '_' in f.filename:
+                parts = f.filename.split('_', 2)
+                if len(parts) != 3:
+                    print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
+                    remaining.append(f)
+                    continue
+
+                package, version, archext = parts
+                arch, ext = archext.split('.', 1)
+            else:
+                parts = f.filename.split('.')
+                if len(parts) < 2:
+                    print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
+                    remaining.append(f)
+                    continue
 
-            package, version, archext = parts
-            arch, ext = archext.split('.', 1)
+                package = parts[0]
+                version = '0'
+                arch = 'all'
+                ext = parts[-1]
 
             try:
                 rule = automatic_byhand_packages.subtree(package)
@@ -1083,7 +1184,7 @@ class ArchiveUpload(object):
                 continue
 
             script = rule['Script']
-            retcode = subprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename)], shell=False)
+            retcode = daklib.daksubprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename), suite.suite_name], shell=False)
             if retcode != 0:
                 print "W: error processing {0}.".format(f.filename)
                 remaining.append(f)
@@ -1185,7 +1286,7 @@ class ArchiveUpload(object):
             source_suites = self.session.query(Suite).filter(Suite.suite_id.in_(source_suite_ids)).subquery()
 
             source_component_func = lambda source: self._source_override(overridesuite, source).component
-            binary_component_func = lambda binary: self._binary_component(overridesuite, binary)
+            binary_component_func = lambda binary: self._binary_component(overridesuite, binary, only_overrides=False)
 
             (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])
 