Add by-hash support
diff --git a/daklib/archive.py b/daklib/archive.py
index bc67c2d9a7492278417c69e0d22bb4aee0d687f1..bfab2c64e832ba63f64806a633bb0fc177cbc8b7 100644
--- a/daklib/archive.py
+++ b/daklib/archive.py
@@ -26,13 +26,15 @@ import daklib.upload as upload
 import daklib.utils as utils
 from daklib.fstransactions import FilesystemTransaction
 from daklib.regexes import re_changelog_versions, re_bin_only_nmu
+import daklib.daksubprocess
 
 import apt_pkg
 from datetime import datetime
 import os
 import shutil
-import subprocess
 from sqlalchemy.orm.exc import NoResultFound
+from sqlalchemy.orm import object_session
+import sqlalchemy.exc
 import tempfile
 import traceback
 
@@ -85,7 +87,7 @@ class ArchiveTransaction(object):
         Will not give an error when the file is already present.
 
         @rtype:  L{daklib.dbconn.PoolFile}
-        @return: batabase object for the new file
+        @return: database object for the new file
         """
         session = self.session
 
         """
         session = self.session
 
@@ -108,7 +110,7 @@ class ArchiveTransaction(object):
             session.flush()
 
             path = os.path.join(archive.path, 'pool', component.component_name, poolname)
-            hashed_file_path = os.path.join(directory, hashed_file.filename)
+            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
             self.fs.copy(hashed_file_path, path, link=False, mode=archive.mode)
 
         return poolfile
@@ -174,6 +176,10 @@ class ArchiveTransaction(object):
             maintainer=maintainer,
             poolfile=db_file,
             binarytype=binary.type,
+            )
+        # Other attributes that are ignored for purposes of equality with
+        # an existing binary
+        rest2 = dict(
             fingerprint=fingerprint,
             )
 
@@ -186,6 +192,8 @@ class ArchiveTransaction(object):
             db_binary = DBBinary(**unique)
             for key, value in rest.iteritems():
                 setattr(db_binary, key, value)
+            for key, value in rest2.iteritems():
+                setattr(db_binary, key, value)
             session.add(db_binary)
             session.flush()
             import_metadata_into_db(db_binary, session)
@@ -239,51 +247,18 @@ class ArchiveTransaction(object):
         """Add Built-Using sources to C{db_binary.extra_sources}
         """
         session = self.session
         """Add Built-Using sources to C{db_binary.extra_sources}
         """
         session = self.session
-        built_using = control.get('Built-Using', None)
-
-        if built_using is not None:
-            for dep in apt_pkg.parse_depends(built_using):
-                assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
-                bu_source_name, bu_source_version, comp = dep[0]
-                assert comp == '=', 'Built-Using must contain strict dependencies'
-
-                bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
-                if bu_source is None:
-                    raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))
-
-                self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)
-
-                db_binary.extra_sources.append(bu_source)
-
-    def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
-        """Install a source package
-
-        @type  directory: str
-        @param directory: directory the source package is located in
-
-        @type  source: L{daklib.upload.Source}
-        @param source: source package to install
-
-        @type  suite: L{daklib.dbconn.Suite}
-        @param suite: target suite
-
-        @type  component: L{daklib.dbconn.Component}
-        @param component: target component
 
-        @type  changed_by: L{daklib.dbconn.Maintainer}
-        @param changed_by: person who prepared this version of the package
+        for bu_source_name, bu_source_version in daklib.utils.parse_built_using(control):
+            bu_source = session.query(DBSource).filter_by(source=bu_source_name, version=bu_source_version).first()
+            if bu_source is None:
+                raise ArchiveException('{0}: Built-Using refers to non-existing source package {1} (= {2})'.format(filename, bu_source_name, bu_source_version))
 
-        @type  allow_tainted: bool
-        @param allow_tainted: allow to copy additional files from tainted archives
+            self._ensure_extra_source_exists(filename, bu_source, suite.archive, extra_archives=extra_archives)
 
-        @type  fingerprint: L{daklib.dbconn.Fingerprint}
-        @param fingerprint: optional fingerprint
+            db_binary.extra_sources.append(bu_source)
 
-        @rtype:  L{daklib.dbconn.DBSource}
-        @return: database object for the new source
-        """
+    def install_source_to_archive(self, directory, source, archive, component, changed_by, allow_tainted=False, fingerprint=None):
         session = self.session
-        archive = suite.archive
         control = source.dsc
         maintainer = get_or_set_maintainer(control['Maintainer'], session)
         source_name = control['Source']
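Note: the inline Built-Using parser deleted above moves to daklib.utils.parse_built_using. A rough sketch of what that helper is expected to return, reconstructed from the deleted code rather than taken from the actual daklib.utils implementation:

    import apt_pkg

    def parse_built_using(control):
        # Return (source, version) pairs from a strict Built-Using field.
        built_using = control.get('Built-Using', None)
        if built_using is None:
            return []
        pairs = []
        for dep in apt_pkg.parse_depends(built_using):
            assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
            name, version, comp = dep[0]
            assert comp == '=', 'Built-Using must contain strict dependencies'
            pairs.append((name, version))
        return pairs
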
@@ -299,12 +274,15 @@ class ArchiveTransaction(object):
             )
         rest = dict(
             maintainer=maintainer,
-            changedby=changed_by,
-            #install_date=datetime.now().date(),
             poolfile=db_file_dsc,
-            fingerprint=fingerprint,
             dm_upload_allowed=(control.get('DM-Upload-Allowed', 'no') == 'yes'),
             )
+        # Other attributes that are ignored for purposes of equality with
+        # an existing source
+        rest2 = dict(
+            changedby=changed_by,
+            fingerprint=fingerprint,
+            )
 
         created = False
         try:
@@ -317,8 +295,8 @@ class ArchiveTransaction(object):
             db_source = DBSource(**unique)
             for key, value in rest.iteritems():
                 setattr(db_source, key, value)
-            # XXX: set as default in postgres?
-            db_source.install_date = datetime.now().date()
+            for key, value in rest2.iteritems():
+                setattr(db_source, key, value)
             session.add(db_source)
             session.flush()
 
@@ -329,18 +307,15 @@ class ArchiveTransaction(object):
             session.add(db_dsc_file)
             session.flush()
 
-        if suite in db_source.suites:
-            return db_source
-
-        db_source.suites.append(suite)
-
         if not created:
+            for f in db_source.srcfiles:
+                self._copy_file(f.poolfile, archive, component, allow_tainted=allow_tainted)
             return db_source
 
         ### Now add remaining files and copy them to the archive.
 
         for hashed_file in source.files.itervalues():
-            hashed_file_path = os.path.join(directory, hashed_file.filename)
+            hashed_file_path = os.path.join(directory, hashed_file.input_filename)
             if os.path.exists(hashed_file_path):
                 db_file = self._install_file(directory, hashed_file, archive, component, source_name)
                 session.add(db_file)
@@ -361,17 +336,49 @@ class ArchiveTransaction(object):
         # Uploaders are the maintainer and co-maintainers from the Uploaders field
         db_source.uploaders.append(maintainer)
         if 'Uploaders' in control:
-            def split_uploaders(field):
-                import re
-                for u in re.sub(">[ ]*,", ">\t", field).split("\t"):
-                    yield u.strip()
-
+            from daklib.textutils import split_uploaders
             for u in split_uploaders(control['Uploaders']):
                 db_source.uploaders.append(get_or_set_maintainer(u, session))
         session.flush()
 
         return db_source
 
+    def install_source(self, directory, source, suite, component, changed_by, allow_tainted=False, fingerprint=None):
+        """Install a source package
+
+        @type  directory: str
+        @param directory: directory the source package is located in
+
+        @type  source: L{daklib.upload.Source}
+        @param source: source package to install
+
+        @type  suite: L{daklib.dbconn.Suite}
+        @param suite: target suite
+
+        @type  component: L{daklib.dbconn.Component}
+        @param component: target component
+
+        @type  changed_by: L{daklib.dbconn.Maintainer}
+        @param changed_by: person who prepared this version of the package
+
+        @type  allow_tainted: bool
+        @param allow_tainted: allow to copy additional files from tainted archives
+
+        @type  fingerprint: L{daklib.dbconn.Fingerprint}
+        @param fingerprint: optional fingerprint
+
+        @rtype:  L{daklib.dbconn.DBSource}
+        @return: database object for the new source
+        """
+        db_source = self.install_source_to_archive(directory, source, suite.archive, component, changed_by, allow_tainted, fingerprint)
+
+        if suite in db_source.suites:
+            return db_source
+        db_source.suites.append(suite)
+        self.session.flush()
+
+        return db_source
+
     def _copy_file(self, db_file, archive, component, allow_tainted=False):
         """Copy a file to the given archive and component
 
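Note: install_source is now a thin wrapper around install_source_to_archive. A minimal usage sketch, mirroring the call made in _install_to_suite later in this diff (the surrounding variable names are assumed):

    # Installs the files into suite.archive via install_source_to_archive()
    # and then associates the suite only if it is not already present.
    db_source = transaction.install_source(
        directory, source, suite, component, changed_by,
        fingerprint=fingerprint)
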
@@ -390,13 +397,13 @@ class ArchiveTransaction(object):
         session = self.session
 
         if session.query(ArchiveFile).filter_by(archive=archive, component=component, file=db_file).first() is None:
-            query = session.query(ArchiveFile).filter_by(file=db_file, component=component)
+            query = session.query(ArchiveFile).filter_by(file=db_file)
             if not allow_tainted:
                 query = query.join(Archive).filter(Archive.tainted == False)
 
             source_af = query.first()
             if source_af is None:
-                raise ArchiveException('cp: Could not find {0} in component {1} in any archive.'.format(db_file.filename, component.component_name))
+                raise ArchiveException('cp: Could not find {0} in any archive.'.format(db_file.filename))
             target_af = ArchiveFile(archive, component, db_file)
             session.add(target_af)
             session.flush()
@@ -531,6 +538,9 @@ class ArchiveTransaction(object):
         self.session.rollback()
         self.fs.rollback()
 
+    def flush(self):
+        self.session.flush()
+
     def __enter__(self):
         return self
 
@@ -541,6 +551,34 @@ class ArchiveTransaction(object):
             self.rollback()
         return None
 
+def source_component_from_package_list(package_list, suite):
+    """Get component for a source package
+
+    This function will look at the Package-List field to determine the
+    component the source package belongs to. This is the first component
+    the source package provides binaries for (first with respect to the
+    ordering of components).
+
+    If the source package has no Package-List field, None is returned.
+
+    @type  package_list: L{daklib.packagelist.PackageList}
+    @param package_list: package list of the source to get the override for
+
+    @type  suite: L{daklib.dbconn.Suite}
+    @param suite: suite to consider for binaries produced
+
+    @rtype:  L{daklib.dbconn.Component} or C{None}
+    @return: component for the given source or C{None}
+    """
+    if package_list.fallback:
+        return None
+    session = object_session(suite)
+    packages = package_list.packages_for_suite(suite)
+    components = set(p.component for p in packages)
+    query = session.query(Component).order_by(Component.ordering) \
+            .filter(Component.component_name.in_(components))
+    return query.first()
+
 class ArchiveUpload(object):
     """handle an upload
 
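Note: a short usage sketch for the new source_component_from_package_list helper; this is how _source_override applies it in a later hunk:

    # Returns the first component (by Component.ordering) that the source
    # builds binaries for in the given suite, or None when there is no
    # usable Package-List field.
    component = source_component_from_package_list(source.package_list, suite)
    if component is not None:
        query = query.filter(Override.component == component)
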
@@ -599,9 +637,24 @@ class ArchiveUpload(object):
         @type: bool
         """
 
+        self._checked = False
+        """checks passes. set by C{check}
+        @type: bool
+        """
+
         self._new_queue = self.session.query(PolicyQueue).filter_by(queue_name='new').one()
         self._new = self._new_queue.suite
 
+    def warn(self, message):
+        """add a warning message
+
+        Adds a warning message that can later be seen in C{self.warnings}
+
+        @type  message: string
+        @param message: warning message
+        """
+        self.warnings.append(message)
+
     def prepare(self):
         """prepare upload for further processing
 
@@ -622,11 +675,13 @@ class ArchiveUpload(object):
         cnf = Config()
         session = self.transaction.session
 
-        self.directory = tempfile.mkdtemp(dir=cnf.get('Dir::TempPath'))
+        group = cnf.get('Dinstall::UnprivGroup') or None
+        self.directory = utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
+                                            mode=0o2750, group=group)
         with FilesystemTransaction() as fs:
             src = os.path.join(self.original_directory, self.original_changes.filename)
             dst = os.path.join(self.directory, self.original_changes.filename)
-            fs.copy(src, dst)
+            fs.copy(src, dst, mode=0o640)
 
             self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)
 
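Note on the new permissions (the mode bits are standard; the stated intent is an assumption):

    # 0o2750 on the temporary directory = setgid + rwxr-x---: members of the
    # Dinstall::UnprivGroup group can read the unpacked upload, others cannot.
    # Files copied in with mode=0o640 (rw-r-----) follow the same idea.
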
@@ -635,9 +690,15 @@ class ArchiveUpload(object):
                 dst = os.path.join(self.directory, f.filename)
                 if not os.path.exists(src):
                     continue
-                fs.copy(src, dst)
+                fs.copy(src, dst, mode=0o640)
+
+            source = None
+            try:
+                source = self.changes.source
+            except Exception:
+                # Do not raise an exception here if the .dsc is invalid.
+                pass
 
-            source = self.changes.source
             if source is not None:
                 for f in source.files.itervalues():
                     src = os.path.join(self.original_directory, f.filename)
@@ -646,7 +707,7 @@ class ArchiveUpload(object):
                         try:
                             db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
                             db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
-                            fs.copy(db_archive_file.path, dst, symlink=True)
+                            fs.copy(db_archive_file.path, dst, mode=0o640)
                         except KeyError:
                             # Ignore if get_file could not find it. Upload will
                             # probably be rejected later.
@@ -672,7 +733,8 @@ class ArchiveUpload(object):
 
         sourcedir = os.path.join(self.directory, 'source')
         if not os.path.exists(sourcedir):
-            subprocess.check_call(["dpkg-source", "--no-copy", "-x", dsc_path, sourcedir], shell=False)
+            devnull = open('/dev/null', 'w')
+            daklib.daksubprocess.check_call(["dpkg-source", "--no-copy", "--no-check", "-x", dsc_path, sourcedir], shell=False, stdout=devnull)
         if not os.path.isdir(sourcedir):
             raise Exception("{0} is not a directory after extracting source package".format(sourcedir))
         return sourcedir
@@ -686,7 +748,7 @@ class ArchiveUpload(object):
                 if src == suite_name:
                     suite_name = dst
                     if rtype != "silent-map":
-                        self.warnings.append('Mapping {0} to {0}.'.format(src, dst))
+                        self.warnings.append('Mapping {0} to {1}.'.format(src, dst))
             elif rtype == "ignore":
                 ignored = fields[1]
                 if suite_name == ignored:
@@ -695,7 +757,7 @@ class ArchiveUpload(object):
             elif rtype == "reject":
                 rejected = fields[1]
                 if suite_name == rejected:
-                    self.reject_reasons.append('Uploads to {0} are not accepted.'.format(suite))
+                    raise checks.Reject('Uploads to {0} are not accepted.'.format(rejected))
             ## XXX: propup-version and map-unreleased not yet implemented
         return suite_name
 
@@ -716,28 +778,54 @@ class ArchiveUpload(object):
         suites = session.query(Suite).filter(Suite.suite_name.in_(suite_names))
         return suites
 
-    def _check_new(self, suite):
+    def _check_new_binary_overrides(self, suite, overridesuite):
+        new = False
+        source = self.changes.source
+
+        if source is not None and not source.package_list.fallback:
+            packages = source.package_list.packages_for_suite(suite)
+            binaries = [ entry for entry in packages ]
+            for b in binaries:
+                override = self._binary_override(overridesuite, b)
+                if override is None:
+                    self.warnings.append('binary:{0} is NEW.'.format(b.name))
+                    new = True
+        else:
+            binaries = self.changes.binaries
+            for b in binaries:
+                if utils.is_in_debug_section(b.control) and suite.debug_suite is not None:
+                    continue
+                override = self._binary_override(overridesuite, b)
+                if override is None:
+                    self.warnings.append('binary:{0} is NEW.'.format(b.name))
+                    new = True
+
+        return new
+
+    def _check_new(self, suite, overridesuite):
         """Check if upload is NEW
 
         An upload is NEW if it has binary or source packages that do not have
         """Check if upload is NEW
 
         An upload is NEW if it has binary or source packages that do not have
-        an override in C{suite} OR if it references files ONLY in a tainted
-        archive (eg. when it references files in NEW).
+        an override in C{overridesuite} OR if it references files ONLY in a
+        tainted archive (eg. when it references files in NEW).
+
+        Debug packages (*-dbgsym in Section: debug) are not considered as NEW
+        if C{suite} has a separate debug suite.
 
         @rtype:  bool
         @return: C{True} if the upload is NEW, C{False} otherwise
         """
         session = self.session
+        new = False
 
         # Check for missing overrides
-        for b in self.changes.binaries:
-            override = self._binary_override(suite, b)
-            if override is None:
-                return True
-
+        if self._check_new_binary_overrides(suite, overridesuite):
+            new = True
         if self.changes.source is not None:
-            override = self._source_override(suite, self.changes.source)
+            override = self._source_override(overridesuite, self.changes.source)
             if override is None:
-                return True
+                self.warnings.append('source:{0} is NEW.'.format(self.changes.source.dsc['Source']))
+                new = True
 
         # Check if we reference a file only in a tainted archive
         files = self.changes.files.values()
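Note: an illustration of the debug-package rule from the docstring above (the package name is made up):

    # A binary 'foo-dbgsym' with Section: debug is skipped by the binary
    # override check when suite.debug_suite is set, so a missing override
    # for it alone no longer sends the upload to NEW; without a debug suite
    # it is treated like any other binary.
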
@@ -751,7 +839,10 @@ class ArchiveUpload(object):
             in_untainted_archive = (query_untainted.first() is not None)
 
             if in_archive and not in_untainted_archive:
-                return True
+                self.warnings.append('{0} is only available in NEW.'.format(f.filename))
+                new = True
+
+        return new
 
     def _final_suites(self):
         session = self.session
@@ -763,7 +854,7 @@ class ArchiveUpload(object):
             overridesuite = suite
             if suite.overridesuite is not None:
                 overridesuite = session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
-            if self._check_new(overridesuite):
+            if self._check_new(suite, overridesuite):
                 self.new = True
             final_suites.add(suite)
 
@@ -775,7 +866,7 @@ class ArchiveUpload(object):
         @type  suite: L{daklib.dbconn.Suite}
         @param suite: suite to get override for
 
-        @type  binary: L{daklib.upload.Binary}
+        @type  binary: L{daklib.upload.Binary} or L{daklib.packagelist.PackageListEntry}
         @param binary: binary to get override for
 
         @rtype:  L{daklib.dbconn.Override} or C{None}
@@ -784,8 +875,12 @@ class ArchiveUpload(object):
         if suite.overridesuite is not None:
             suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
 
-        query = self.session.query(Override).filter_by(suite=suite, package=binary.control['Package']) \
-                .join(Component).filter(Component.component_name == binary.component) \
+        mapped_component = get_mapped_component(binary.component)
+        if mapped_component is None:
+            return None
+
+        query = self.session.query(Override).filter_by(suite=suite, package=binary.name) \
+                .join(Component).filter(Component.component_name == mapped_component.component_name) \
                 .join(OverrideType).filter(OverrideType.overridetype == binary.type)
 
         try:
@@ -808,10 +903,13 @@ class ArchiveUpload(object):
         if suite.overridesuite is not None:
             suite = self.session.query(Suite).filter_by(suite_name=suite.overridesuite).one()
 
-        # XXX: component for source?
         query = self.session.query(Override).filter_by(suite=suite, package=source.dsc['Source']) \
                 .join(OverrideType).filter(OverrideType.overridetype == 'dsc')
 
+        component = source_component_from_package_list(source.package_list, suite)
+        if component is not None:
+            query = query.filter(Override.component == component)
+
         try:
             return query.one()
         except NoResultFound:
@@ -853,28 +951,38 @@ class ArchiveUpload(object):
         assert self.changes.valid_signature
 
         try:
+            # Validate signatures and hashes before we do any real work:
             for chk in (
-                    checks.SignatureCheck,
+                    checks.SignatureAndHashesCheck,
+                    checks.SignatureTimestampCheck,
                     checks.ChangesCheck,
-                    checks.TransitionCheck,
-                    checks.UploadBlockCheck,
-                    checks.HashesCheck,
+                    checks.ExternalHashesCheck,
                     checks.SourceCheck,
                     checks.BinaryCheck,
                     checks.BinaryTimestampCheck,
-                    checks.ACLCheck,
                     checks.SingleDistributionCheck,
-                    checks.NoSourceOnlyCheck,
-                    checks.LintianCheck,
+                    checks.ArchAllBinNMUCheck,
                     ):
                 chk().check(self)
 
             final_suites = self._final_suites()
             if len(final_suites) == 0:
-                self.reject_reasons.append('Ended with no suite to install to.')
+                self.reject_reasons.append('No target suite found. Please check your target distribution and that you uploaded to the right archive.')
                 return False
 
+            self.final_suites = final_suites
+
             for chk in (
+                    checks.TransitionCheck,
+                    checks.ACLCheck,
+                    checks.NoSourceOnlyCheck,
+                    checks.LintianCheck,
+                    ):
+                chk().check(self)
+
+            for chk in (
+                    checks.SuiteCheck,
+                    checks.ACLCheck,
                     checks.SourceFormatCheck,
                     checks.SuiteArchitectureCheck,
                     checks.VersionCheck,
@@ -885,7 +993,7 @@ class ArchiveUpload(object):
             if len(self.reject_reasons) != 0:
                 return False
 
-            self.final_suites = final_suites
+            self._checked = True
             return True
         except checks.Reject as e:
             self.reject_reasons.append(unicode(e))
@@ -926,20 +1034,41 @@ class ArchiveUpload(object):
         source = self.changes.source
         if source is not None:
             component = source_component_func(source)
-            db_source = self.transaction.install_source(self.directory, source, suite, component, changed_by, fingerprint=self.fingerprint)
+            db_source = self.transaction.install_source(
+                self.directory,
+                source,
+                suite,
+                component,
+                changed_by,
+                fingerprint=self.fingerprint
+            )
         else:
             db_source = None
 
         db_binaries = []
         for binary in self.changes.binaries:
+            copy_to_suite = suite
+            if utils.is_in_debug_section(binary.control) and suite.debug_suite is not None:
+                copy_to_suite = suite.debug_suite
+
             component = binary_component_func(binary)
-            db_binary = self.transaction.install_binary(self.directory, binary, suite, component, fingerprint=self.fingerprint, source_suites=source_suites, extra_source_archives=extra_source_archives)
+            db_binary = self.transaction.install_binary(
+                self.directory,
+                binary,
+                copy_to_suite,
+                component,
+                fingerprint=self.fingerprint,
+                source_suites=source_suites,
+                extra_source_archives=extra_source_archives
+            )
             db_binaries.append(db_binary)
 
         if suite.copychanges:
             src = os.path.join(self.directory, self.changes.filename)
             dst = os.path.join(suite.archive.path, 'dists', suite.suite_name, self.changes.filename)
-            self.transaction.fs.copy(src, dst)
+            self.transaction.fs.copy(src, dst, mode=suite.archive.mode)
+
+        suite.update_last_changed()
 
         return (db_source, db_binaries)
 
@@ -971,8 +1100,11 @@ class ArchiveUpload(object):
         db_changes.changelog_id = changelog_id
         db_changes.closes = self.changes.closed_bugs
 
-        self.transaction.session.add(db_changes)
-        self.transaction.session.flush()
+        try:
+            self.transaction.session.add(db_changes)
+            self.transaction.session.flush()
+        except sqlalchemy.exc.IntegrityError:
+            raise ArchiveException('{0} is already known.'.format(self.changes.filename))
 
         return db_changes
 
@@ -987,7 +1119,7 @@ class ArchiveUpload(object):
         self.transaction.session.flush()
 
         dst = os.path.join(policy_queue.path, self.changes.filename)
-        self.transaction.fs.copy(self.changes.path, dst)
+        self.transaction.fs.copy(self.changes.path, dst, mode=policy_queue.change_perms)
 
         return u
 
@@ -1002,6 +1134,7 @@ class ArchiveUpload(object):
         assert len(self.reject_reasons) == 0
         assert self.changes.valid_signature
         assert self.final_suites is not None
+        assert self._checked
 
         byhand = self.changes.byhand_files
         if len(byhand) == 0:
@@ -1017,26 +1150,41 @@ class ArchiveUpload(object):
 
         remaining = []
         for f in byhand:
-            parts = f.filename.split('_', 2)
-            if len(parts) != 3:
-                print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
-                remaining.append(f)
-                continue
+            if '_' in f.filename:
+                parts = f.filename.split('_', 2)
+                if len(parts) != 3:
+                    print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
+                    remaining.append(f)
+                    continue
 
-            package, version, archext = parts
-            arch, ext = archext.split('.', 1)
+                package, version, archext = parts
+                arch, ext = archext.split('.', 1)
+            else:
+                parts = f.filename.split('.')
+                if len(parts) < 2:
+                    print "W: unexpected byhand filename {0}. No automatic processing.".format(f.filename)
+                    remaining.append(f)
+                    continue
 
-            rule = automatic_byhand_packages.get(package)
-            if rule is None:
+                package = parts[0]
+                version = '0'
+                arch = 'all'
+                ext = parts[-1]
+
+            try:
+                rule = automatic_byhand_packages.subtree(package)
+            except KeyError:
                 remaining.append(f)
                 continue
 
-            if rule['Source'] != control['Source'] or rule['Section'] != f.section or rule['Extension'] != ext:
+            if rule['Source'] != self.changes.source_name \
+                    or rule['Section'] != f.section \
+                    or ('Extension' in rule and rule['Extension'] != ext):
                 remaining.append(f)
                 continue
 
             script = rule['Script']
-            retcode = subprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename)], shell=False)
+            retcode = daklib.daksubprocess.call([script, os.path.join(self.directory, f.filename), control['Version'], arch, os.path.join(self.directory, self.changes.filename), suite.suite_name], shell=False)
             if retcode != 0:
                 print "W: error processing {0}.".format(f.filename)
                 remaining.append(f)
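Note: worked examples of the two byhand filename forms handled above (the filenames are illustrative only):

    # Underscore form: package_version_arch.ext
    parts = 'debian-faq_5.0.1_all.tar.gz'.split('_', 2)
    package, version, archext = parts     # 'debian-faq', '5.0.1', 'all.tar.gz'
    arch, ext = archext.split('.', 1)      # 'all', 'tar.gz'

    # No underscore: fall back to defaults
    parts = 'win32-loader.exe'.split('.')  # ['win32-loader', 'exe']
    package, version, arch, ext = parts[0], '0', 'all', parts[-1]
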
@@ -1062,7 +1210,7 @@ class ArchiveUpload(object):
 
         src = os.path.join(self.directory, hashed_file.filename)
         dst = os.path.join(policy_queue.path, hashed_file.filename)
-        fs.copy(src, dst)
+        fs.copy(src, dst, mode=policy_queue.change_perms)
 
         return byhand_file
 
@@ -1092,7 +1240,7 @@ class ArchiveUpload(object):
         for binary in self.changes.binaries:
             control = binary.control
             source_package, source_version = binary.source
-            line = " ".join([control['Package'], control['Version'], source_package, source_version])
+            line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
             print >>debinfo, line
         debinfo.close()
 
@@ -1112,6 +1260,7 @@ class ArchiveUpload(object):
         assert len(self.reject_reasons) == 0
         assert self.changes.valid_signature
         assert self.final_suites is not None
+        assert self._checked
         assert not self.new
 
         db_changes = self._install_changes()
@@ -1127,10 +1276,17 @@ class ArchiveUpload(object):
             if policy_queue is not None:
                 redirected_suite = policy_queue.suite
 
-            source_suites = self.session.query(Suite).filter(Suite.suite_id.in_([suite.suite_id, redirected_suite.suite_id])).subquery()
+            # source can be in the suite we install to or any suite we enhance
+            source_suite_ids = set([suite.suite_id, redirected_suite.suite_id])
+            for enhanced_suite_id, in self.session.query(VersionCheck.reference_id) \
+                    .filter(VersionCheck.suite_id.in_(source_suite_ids)) \
+                    .filter(VersionCheck.check == 'Enhances'):
+                source_suite_ids.add(enhanced_suite_id)
+
+            source_suites = self.session.query(Suite).filter(Suite.suite_id.in_(source_suite_ids)).subquery()
 
             source_component_func = lambda source: self._source_override(overridesuite, source).component
-            binary_component_func = lambda binary: self._binary_component(overridesuite, binary)
+            binary_component_func = lambda binary: self._binary_component(overridesuite, binary, only_overrides=False)
 
             (db_source, db_binaries) = self._install_to_suite(redirected_suite, source_component_func, binary_component_func, source_suites=source_suites, extra_source_archives=[suite.archive])
 
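Note: a sketch of what the widened source-suite lookup achieves, with hypothetical suite names:

    # With a VersionCheck row (suite='experimental', check='Enhances',
    # reference='unstable'), an upload to experimental may reference a source
    # package that only exists in unstable; its suite_id is added to
    # source_suite_ids, so install_binary can still resolve the source.
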
@@ -1161,16 +1317,22 @@ class ArchiveUpload(object):
         binaries = self.changes.binaries
         byhand = self.changes.byhand_files
 
-        new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
-        if len(byhand) > 0:
-            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
-        new_suite = new_queue.suite
-
         # we need a suite to guess components
         suites = list(self.final_suites)
         assert len(suites) == 1, "NEW uploads must be to a single suite"
         suite = suites[0]
 
+        # decide which NEW queue to use
+        if suite.new_queue is None:
+            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='new').one()
+        else:
+            new_queue = suite.new_queue
+        if len(byhand) > 0:
+            # There is only one global BYHAND queue
+            new_queue = self.transaction.session.query(PolicyQueue).filter_by(queue_name='byhand').one()
+        new_suite = new_queue.suite
+
+
         def binary_component_func(binary):
             return self._binary_component(suite, binary, only_overrides=False)
 
@@ -1187,8 +1349,9 @@ class ArchiveUpload(object):
                 source_component_name = guess
                 break
         if source_component_name is None:
-            raise Exception('Could not guess source component.')
-        source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
+            source_component = self.session.query(Component).order_by(Component.component_id).first()
+        else:
+            source_component = self.session.query(Component).filter_by(component_name=source_component_name).one()
         source_component_func = lambda source: source_component
 
         db_changes = self._install_changes()