[repacker] recursively duplicate nodes during isolation.

If a node is duplicated during isolation, then any children it has will have incoming links from outside the subgraph (from both the duplicated node and the original node), so they must be duplicated too.
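
For context, a minimal standalone sketch of the recursion follows (the toy node_t, graph_t, and duplicate_node below are illustrative stand-ins, not the repacker's actual types): duplication walks every node reachable from node_idx, and the old-index -> new-index map doubles as a visited set, so a child shared by several nodes inside the subgraph is copied exactly once and never revisited.

    #include <cstdio>
    #include <unordered_map>
    #include <vector>

    struct node_t
    {
      std::vector<unsigned> links;   // indices of child nodes
    };

    struct graph_t
    {
      std::vector<node_t> nodes;

      // Appends a copy of nodes[idx] and returns the new node's index.
      unsigned duplicate_node (unsigned idx)
      {
        nodes.push_back (nodes[idx]);
        return (unsigned) nodes.size () - 1;
      }

      // Recursively duplicates every node reachable from node_idx.
      // index_map memoizes old index -> new index, so a node reachable
      // through more than one path is duplicated only once.
      // Links are not re-targeted here; a later pass would remap them.
      void duplicate_subgraph (unsigned node_idx,
                               std::unordered_map<unsigned, unsigned>& index_map)
      {
        if (index_map.count (node_idx))
          return;                    // already duplicated, skip

        index_map[node_idx] = duplicate_node (node_idx);

        // Copy the child list first: duplicate_node may reallocate nodes.
        std::vector<unsigned> children = nodes[node_idx].links;
        for (unsigned child : children)
          duplicate_subgraph (child, index_map);
      }
    };

    int main ()
    {
      // 0 -> 1 -> 2, plus an external parent 3 -> 2.  Duplicating only
      // node 1 would leave node 2 with incoming links from outside the
      // copied subgraph (from the original node 1 and from node 3), which
      // is why the duplication has to recurse into the children as well.
      graph_t g;
      g.nodes = { {{1}}, {{2}}, {{}}, {{2}} };

      std::unordered_map<unsigned, unsigned> index_map;
      g.duplicate_subgraph (1, index_map);

      for (const auto& m : index_map)
        printf ("old %u -> new %u\n", m.first, m.second);
      return 0;
    }
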
diff --git a/src/hb-repacker.hh b/src/hb-repacker.hh
index c52720b..4bc9e12 100644
--- a/src/hb-repacker.hh
+++ b/src/hb-repacker.hh
@@ -364,11 +364,11 @@
       const auto& node = vertices_[entry.first];
       unsigned subgraph_incoming_edges = entry.second;
 
-      if (entry.first != root_idx && subgraph_incoming_edges < node.incoming_edges)
+      if (subgraph_incoming_edges < node.incoming_edges)
       {
         // Only  de-dup objects with incoming links from outside the subgraph.
         made_changes = true;
-        index_map.set (entry.first, duplicate (entry.first));
+        duplicate_subgraph (entry.first, index_map);
       }
     }
 
@@ -393,6 +393,22 @@
   }
 
   /*
+   * Duplicates all nodes in the subgraph reachable from node_idx. Does not re-assign
+   * links. index_map is updated with mappings from old node index to new node index.
+   * If a node has already been duplicated, it will be skipped.
+   */
+  void duplicate_subgraph (unsigned node_idx, hb_hashmap_t<unsigned, unsigned>& index_map)
+  {
+    if (index_map.has (node_idx))
+      return;
+
+    index_map.set (node_idx, duplicate (node_idx));
+    for (const auto& l : object (node_idx).links) {
+      duplicate_subgraph (l.objidx, index_map);
+    }
+  }
+
+  /*
    * Creates a copy of node_idx and returns it's new index.
    */
   unsigned duplicate (unsigned node_idx)