git.decadent.org.uk Git - dak.git/commitdiff
Merge branch 'master' into rm
author Torsten Werner <twerner@debian.org>
Thu, 11 Aug 2011 20:42:27 +0000 (22:42 +0200)
committer Torsten Werner <twerner@debian.org>
Thu, 11 Aug 2011 20:42:27 +0000 (22:42 +0200)
dak/rm.py
daklib/dbconn.py

index e0a5fa79d73653d538067f0947330222cae109e3..0583755b3b6bc4c048af6d395b51f599edcf3bec 100755 (executable)
--- a/dak/rm.py
+++ b/dak/rm.py
@@ -98,67 +98,77 @@ def game_over():
 
 ################################################################################
 
-def reverse_depends_check(removals, suites, arches=None):
+def reverse_depends_check(removals, suite, arches=None, session=None):
+    dbsuite = get_suite(suite, session)
     cnf = Config()
 
     print "Checking reverse dependencies..."
-    components = get_component_names()
     dep_problem = 0
     p2c = {}
     all_broken = {}
     if arches:
         all_arches = set(arches)
     else:
-        all_arches = set([x.arch_string for x in get_suite_architectures(suites[0])])
+        all_arches = set([x.arch_string for x in get_suite_architectures(suite)])
     all_arches -= set(["source", "all"])
+    metakey_d = get_or_set_metadatakey("Depends", session)
+    metakey_p = get_or_set_metadatakey("Provides", session)
+    params = {
+        'suite_id':     dbsuite.suite_id,
+        'metakey_d_id': metakey_d.key_id,
+        'metakey_p_id': metakey_p.key_id,
+        'arch_all_id' : get_architecture('all', session).arch_id,
+    }
     for architecture in all_arches:
         deps = {}
         sources = {}
         virtual_packages = {}
-        for component in components:
-            filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (cnf["Dir::Root"], suites[0], component, architecture)
-            # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
-            (fd, temp_filename) = utils.temp_filename()
-            (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
-            if (result != 0):
-                utils.fubar("Gunzip invocation failed!\n%s\n" % (output), result)
-            # Also check for udebs
-            filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (cnf["Dir::Root"], suites[0], component, architecture)
-            if os.path.exists(filename):
-                (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_filename))
-                if (result != 0):
-                    utils.fubar("Gunzip invocation failed!\n%s\n" % (output), result)
-            packages = utils.open_file(temp_filename)
-            Packages = apt_pkg.ParseTagFile(packages)
-            while Packages.Step():
-                package = Packages.Section.Find("Package")
-                source = Packages.Section.Find("Source")
-                if not source:
-                    source = package
-                elif ' ' in source:
-                    source = source.split(' ', 1)[0]
-                sources[package] = source
-                depends = Packages.Section.Find("Depends")
-                if depends:
-                    deps[package] = depends
-                provides = Packages.Section.Find("Provides")
-                # Maintain a counter for each virtual package.  If a
-                # Provides: exists, set the counter to 0 and count all
-                # provides by a package not in the list for removal.
-                # If the counter stays 0 at the end, we know that only
-                # the to-be-removed packages provided this virtual
-                # package.
-                if provides:
-                    for virtual_pkg in provides.split(","):
-                        virtual_pkg = virtual_pkg.strip()
-                        if virtual_pkg == package: continue
-                        if not virtual_packages.has_key(virtual_pkg):
-                            virtual_packages[virtual_pkg] = 0
-                        if package not in removals:
-                            virtual_packages[virtual_pkg] += 1
-                p2c[package] = component
-            packages.close()
-            os.unlink(temp_filename)
+        params['arch_id'] = get_architecture(architecture, session).arch_id
+
+        statement = '''
+            create temp table suite_binaries (
+                id integer primary key,
+                package text,
+                source integer,
+                file integer);
+            insert into suite_binaries
+                select b.id, b.package, b.source, b.file
+                    from binaries b WHERE b.id in
+                        (SELECT bin FROM bin_associations WHERE suite = :suite_id)
+                        AND b.architecture in (:arch_id, :arch_all_id);
+            SELECT b.id, b.package, s.source, c.name as component,
+                bmd.value as depends, bmp.value as provides
+                FROM suite_binaries b
+                LEFT OUTER JOIN binaries_metadata bmd
+                    ON b.id = bmd.bin_id AND bmd.key_id = :metakey_d_id
+                LEFT OUTER JOIN binaries_metadata bmp
+                    ON b.id = bmp.bin_id AND bmp.key_id = :metakey_p_id
+                JOIN source s ON b.source = s.id
+                JOIN files f ON b.file = f.id
+                JOIN location l ON f.location = l.id
+                JOIN component c ON l.component = c.id'''
+        session.rollback()
+        query = session.query('id', 'package', 'source', 'component', 'depends', 'provides'). \
+            from_statement(statement).params(params)
+        for binary_id, package, source, component, depends, provides in query:
+            sources[package] = source
+            p2c[package] = component
+            if depends is not None:
+                deps[package] = depends
+            # Maintain a counter for each virtual package.  If a
+            # Provides: exists, set the counter to 0 and count all
+            # provides by a package not in the list for removal.
+            # If the counter stays 0 at the end, we know that only
+            # the to-be-removed packages provided this virtual
+            # package.
+            if provides is not None:
+                for virtual_pkg in provides.split(","):
+                    virtual_pkg = virtual_pkg.strip()
+                    if virtual_pkg == package: continue
+                    if not virtual_packages.has_key(virtual_pkg):
+                        virtual_packages[virtual_pkg] = 0
+                    if package not in removals:
+                        virtual_packages[virtual_pkg] += 1
 
         # If a virtual package is only provided by the to-be-removed
         # packages, treat the virtual package as to-be-removed too.
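
The Provides counter kept in the hunk above can be illustrated with a small standalone sketch; the package names below are hypothetical and only show the counting rule: a virtual package whose counter stays at zero is provided solely by packages slated for removal, so it is treated as removed as well.

    # Hypothetical data, mirroring the counter logic in reverse_depends_check().
    removals = set(["foo", "foo-data"])
    provides = {
        "foo":      ["mail-transport-agent"],   # provider being removed
        "postfix":  ["mail-transport-agent"],   # surviving provider
        "foo-data": ["foo-doc"],                # its only provider is being removed
    }

    virtual_packages = {}
    for package, virtuals in provides.items():
        for virtual_pkg in virtuals:
            virtual_packages.setdefault(virtual_pkg, 0)
            if package not in removals:
                virtual_packages[virtual_pkg] += 1

    # Counters still at zero mark virtual packages that disappear with the
    # removals: "foo-doc" here, but not "mail-transport-agent", which
    # postfix continues to provide.
    gone = [v for (v, n) in virtual_packages.items() if n == 0]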
@@ -206,41 +216,44 @@ def reverse_depends_check(removals, suites, arches=None):
 
     # Check source dependencies (Build-Depends and Build-Depends-Indep)
     all_broken.clear()
-    for component in components:
-        filename = "%s/dists/%s/%s/source/Sources.gz" % (cnf["Dir::Root"], suites[0], component)
-        # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
-        (fd, temp_filename) = utils.temp_filename()
-        result, output = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
-        if result != 0:
-            sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
-            sys.exit(result)
-        sources = utils.open_file(temp_filename, "r")
-        Sources = apt_pkg.ParseTagFile(sources)
-        while Sources.Step():
-            source = Sources.Section.Find("Package")
-            if source in removals: continue
-            parsed_dep = []
-            for build_dep_type in ["Build-Depends", "Build-Depends-Indep"]:
-                build_dep = Sources.Section.get(build_dep_type)
-                if build_dep:
-                    # Remove [arch] information since we want to see breakage on all arches
-                    build_dep = re_build_dep_arch.sub("", build_dep)
-                    try:
-                        parsed_dep += apt_pkg.ParseDepends(build_dep)
-                    except ValueError, e:
-                        print "Error for source %s: %s" % (source, e)
-            for dep in parsed_dep:
-                unsat = 0
-                for dep_package, _, _ in dep:
-                    if dep_package in removals:
-                        unsat += 1
-                if unsat == len(dep):
-                    if component != "main":
-                        source = "%s/%s" % (source, component)
-                    all_broken.setdefault(source, set()).add(utils.pp_deps(dep))
-                    dep_problem = 1
-        sources.close()
-        os.unlink(temp_filename)
+    metakey_bd = get_or_set_metadatakey("Build-Depends", session)
+    metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
+    params = {
+        'suite_id':    dbsuite.suite_id,
+        'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
+    }
+    statement = '''
+        SELECT s.id, s.source, string_agg(sm.value, ', ') as build_dep
+           FROM source s
+           JOIN source_metadata sm ON s.id = sm.src_id
+           WHERE s.id in
+               (SELECT source FROM src_associations
+                   WHERE suite = :suite_id)
+               AND sm.key_id in :metakey_ids
+           GROUP BY s.id, s.source'''
+    query = session.query('id', 'source', 'build_dep').from_statement(statement). \
+        params(params)
+    for source_id, source, build_dep in query:
+        if source in removals: continue
+        parsed_dep = []
+        if build_dep is not None:
+            # Remove [arch] information since we want to see breakage on all arches
+            build_dep = re_build_dep_arch.sub("", build_dep)
+            try:
+                parsed_dep += apt_pkg.ParseDepends(build_dep)
+            except ValueError, e:
+                print "Error for source %s: %s" % (source, e)
+        for dep in parsed_dep:
+            unsat = 0
+            for dep_package, _, _ in dep:
+                if dep_package in removals:
+                    unsat += 1
+            if unsat == len(dep):
+                component = DBSource.get(source_id, session).get_component_name()
+                if component != "main":
+                    source = "%s/%s" % (source, component)
+                all_broken.setdefault(source, set()).add(utils.pp_deps(dep))
+                dep_problem = 1
 
     if all_broken:
         print "# Broken Build-Depends:"
@@ -368,8 +381,8 @@ def main ():
             s = get_suite(suite, session=session)
             if s is not None:
                 suite_ids_list.append(s.suite_id)
-            if suite == "stable":
-                print "**WARNING** About to remove from the stable suite!"
+            if suite in ("oldstable", "stable"):
+                print "**WARNING** About to remove from the (old)stable suite!"
                 print "This should only be done just prior to a (point) release and not at"
                 print "any other time."
                 game_over()
@@ -386,6 +399,10 @@ def main ():
     # Additional component processing
     over_con_components = con_components.replace("c.id", "component")
 
+    # Don't do dependency checks on multiple suites
+    if Options["Rdep-Check"] and len(suites) > 1:
+        utils.fubar("Reverse dependency check on multiple suites is not implemented.")
+
     print "Working...",
     sys.stdout.flush()
     to_remove = []
@@ -508,7 +525,7 @@ def main ():
 
     if Options["Rdep-Check"]:
         arches = utils.split_args(Options["Architecture"])
-        reverse_depends_check(removals, suites, arches)
+        reverse_depends_check(removals, suites[0], arches, session)
 
     # If -n/--no-action, drop out here
     if Options["No-Action"]:
index a6a9e02219c749fabd3a7ac364a6faa0d757e273..1fa7974f5d0f5e2f2cdd4101c1621722bf86a731 100755 (executable)
@@ -2491,6 +2491,9 @@ class DBSource(ORMObject):
 
     metadata = association_proxy('key', 'value')
 
+    def get_component_name(self):
+        return self.poolfile.location.component.component_name
+
     def scan_contents(self):
         '''
         Returns a set of names for non directories. The path names are
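
The get_component_name() helper added here walks poolfile -> location -> component, which is what lets rm.py above qualify a non-main source as "source/component" without reading Sources.gz. A small usage sketch, assuming a DBConn session and a hypothetical source package name:

    from daklib.dbconn import DBConn, DBSource

    session = DBConn().session()
    src = session.query(DBSource).filter_by(source='hello').first()   # hypothetical name
    if src is not None:
        component = src.get_component_name()
        display = src.source if component == "main" else "%s/%s" % (src.source, component)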