about summary refs log tree commit diff stats
path: root/gcc-4.9/gcc/cgraphbuild.c
diff options
context:
space:
mode:
Diffstat (limited to 'gcc-4.9/gcc/cgraphbuild.c')
-rw-r--r--  gcc-4.9/gcc/cgraphbuild.c  391
1 files changed, 389 insertions, 2 deletions
diff --git a/gcc-4.9/gcc/cgraphbuild.c b/gcc-4.9/gcc/cgraphbuild.c
index 19961e27d..c6b04be5a 100644
--- a/gcc-4.9/gcc/cgraphbuild.c
+++ b/gcc-4.9/gcc/cgraphbuild.c
@@ -23,9 +23,12 @@ along with GCC; see the file COPYING3. If not see
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
+#include "tree-eh.h"
#include "pointer-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
+#include "tree-ssa-operands.h"
+#include "tree-into-ssa.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
@@ -33,11 +36,16 @@ along with GCC; see the file COPYING3. If not see
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
+#include "gimple-ssa.h"
#include "langhooks.h"
#include "intl.h"
+#include "toplev.h"
+#include "gcov-io.h"
+#include "coverage.h"
#include "tree-pass.h"
#include "ipa-utils.h"
#include "except.h"
+#include "l-ipo.h"
#include "ipa-inline.h"
/* Context of record_reference. */
@@ -221,6 +229,158 @@ compute_call_stmt_bb_frequency (tree decl, basic_block bb)
return freq;
}
+
+bool cgraph_pre_profiling_inlining_done = false;
+
+/* Return true if E is a fake indirect call edge. */
+
+bool
+cgraph_is_fake_indirect_call_edge (struct cgraph_edge *e)
+{
+ return !e->call_stmt;
+}
+
+
+/* Add fake cgraph edges from NODE to its indirect call callees
+ using profile data. */
+
+static void
+add_fake_indirect_call_edges (struct cgraph_node *node)
+{
+ unsigned n_counts, i;
+ gcov_type *ic_counts;
+
+ /* Enable this only for LIPO for now. */
+ if (!L_IPO_COMP_MODE)
+ return;
+
+ ic_counts
+ = get_coverage_counts_no_warn (DECL_STRUCT_FUNCTION (node->decl),
+ GCOV_COUNTER_ICALL_TOPNV, &n_counts);
+
+ if (!ic_counts)
+ return;
+
+ gcc_assert ((n_counts % GCOV_ICALL_TOPN_NCOUNTS) == 0);
+
+/* After the early_inline_1 before value profile transformation,
+ functions that are indirect call targets may have their bodies
+ removed (extern inline functions or functions from aux modules,
+ functions in comdat etc) if all direct callsites are inlined. This
+ will lead to missing inline opportunities after profile based
+ indirect call promotion. The solution is to add fake edges to
+ indirect call targets. Note that such edges are not associated
+ with actual indirect call sites because it is not possible to
+ reliably match pre-early-inline indirect callsites with indirect
+ call profile counters, which are from the post-early-inline function body. */
+
+ for (i = 0; i < n_counts;
+ i += GCOV_ICALL_TOPN_NCOUNTS, ic_counts += GCOV_ICALL_TOPN_NCOUNTS)
+ {
+ gcov_type val1, val2, count1, count2;
+ struct cgraph_node *direct_call1 = 0, *direct_call2 = 0;
+
+ val1 = ic_counts[1];
+ count1 = ic_counts[2];
+ val2 = ic_counts[3];
+ count2 = ic_counts[4];
+
+ if (val1 == 0 || count1 == 0)
+ continue;
+
+ direct_call1 = find_func_by_global_id (val1, false);
+ if (direct_call1)
+ {
+ tree decl = direct_call1->decl;
+ cgraph_create_edge (node,
+ cgraph_get_create_node (decl),
+ NULL,
+ count1, 0);
+ }
+
+ if (val2 == 0 || count2 == 0)
+ continue;
+ direct_call2 = find_func_by_global_id (val2, false);
+ if (direct_call2)
+ {
+ tree decl = direct_call2->decl;
+ cgraph_create_edge (node,
+ cgraph_get_create_node (decl),
+ NULL,
+ count2, 0);
+ }
+ }
+}
+
+
+/* This can be implemented as an IPA pass that must be first one
+ before any unreachable node elimination. */
+
+void
+cgraph_add_fake_indirect_call_edges (void)
+{
+ struct cgraph_node *node;
+
+ /* Enable this only for LIPO for now. */
+ if (!L_IPO_COMP_MODE)
+ return;
+
+ FOR_EACH_DEFINED_FUNCTION (node)
+ {
+ if (!gimple_has_body_p (node->decl))
+ continue;
+ add_fake_indirect_call_edges (node);
+ }
+}
+
+/* Remove zero count fake edges added for the purpose of ensuring
+ the right processing order. This should be called after all
+ small ipa passes. */
+void
+cgraph_remove_zero_count_fake_edges (void)
+{
+ struct cgraph_node *node;
+
+ /* Enable this only for LIPO for now. */
+ if (!L_IPO_COMP_MODE)
+ return;
+
+ FOR_EACH_DEFINED_FUNCTION (node)
+ {
+ if (!gimple_has_body_p (node->decl))
+ continue;
+
+ struct cgraph_edge *e, *f;
+ for (e = node->callees; e; e = f)
+ {
+ f = e->next_callee;
+ if (!e->call_stmt && !e->count && !e->frequency)
+ cgraph_remove_edge (e);
+ }
+ }
+}
+
+static void
+record_reference_to_real_target_from_alias (struct cgraph_node *alias)
+{
+ if (!L_IPO_COMP_MODE || !cgraph_pre_profiling_inlining_done)
+ return;
+
+ /* Need to add a reference to the resolved node in LIPO
+ mode to prevent the real node from being eliminated */
+ if (alias->alias && alias->analyzed)
+ {
+ struct cgraph_node *target, *real_target;
+
+ target = dyn_cast<cgraph_node> (symtab_alias_target (alias));
+ real_target = cgraph_lipo_get_resolved_node (target->decl);
+ /* TODO: this may create duplicate entries in the reference list. */
+ if (real_target != target)
+ ipa_record_reference (alias, real_target,
+ IPA_REF_ALIAS, NULL);
+ }
+}
+
/* Mark address taken in STMT. */
static bool
@@ -229,11 +389,55 @@ mark_address (gimple stmt, tree addr, tree, void *data)
addr = get_base_address (addr);
if (TREE_CODE (addr) == FUNCTION_DECL)
{
+ /* Before possibly creating a new node in cgraph_get_create_node,
+ save the current cgraph node for addr. */
+ struct cgraph_node *first_clone = cgraph_get_node (addr);
struct cgraph_node *node = cgraph_get_create_node (addr);
+ /* In LIPO mode we use the resolved node. However, there is
+ a possibility that it may not exist at this point. This
+ can happen in cases of ipa-cp, where this is a reference
+ that will eventually go away during inline_transform when we
+ invoke cgraph_redirect_edge_call_stmt_to_callee to rewrite
+ the call_stmt and skip some arguments. It is possible
+ that earlier during inline_call the references to the original
+ non-cloned resolved node were all eliminated, and it was removed.
+ However, virtual clones may stick around until inline_transform,
+ due to references in other virtual clones, at which point they
+ will all be removed. In between inline_call and inline_transform,
+ however, we will materialize clones which would rebuild references
+ and end up here upon still seeing the reference on the call.
+ Handle this by skipping the resolved node lookup when the first
+ clone was marked global.inlined_to (i.e. it is a virtual clone,
+ the original is gone).
+
+ For example, when this is called after ipa inlining for a call stmt
+ in an ipa cp clone, the call will still look like:
+ foo.isra.3 (pow, ...);
+ while the caller node actually has foo.isra.3.constprop in its
+ callee list. And the original, resolved node for pow would have
+ been eliminated during ipa inlining/virtual cloning if this was
+ the only reference leading to a call.
+
+ Later, during inline_transform, this call statement will be rewritten
+ in cgraph_redirect_edge_call_stmt_to_callee to:
+ foo.isra.3.constprop (...); // pow argument removed
+ */
+ if (L_IPO_COMP_MODE && cgraph_pre_profiling_inlining_done
+ && first_clone && !first_clone->global.inlined_to)
+ {
+ /* We now fix up address references to function decls after the LIPO
+ link, so any existing node that isn't an inline clone should be
+ the resolved node. */
+ struct cgraph_node *resolved = cgraph_lipo_get_resolved_node (addr);
+ gcc_assert (resolved == first_clone);
+ gcc_assert (resolved == node);
+ }
+
cgraph_mark_address_taken_node (node);
ipa_record_reference ((symtab_node *)data,
node,
IPA_REF_ADDR, stmt);
+ record_reference_to_real_target_from_alias (node);
}
else if (addr && TREE_CODE (addr) == VAR_DECL
&& (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
@@ -243,6 +447,13 @@ mark_address (gimple stmt, tree addr, tree, void *data)
ipa_record_reference ((symtab_node *)data,
vnode,
IPA_REF_ADDR, stmt);
+ if (L_IPO_COMP_MODE && cgraph_pre_profiling_inlining_done)
+ {
+ struct varpool_node *rvnode = real_varpool_node (addr);
+ if (rvnode != vnode)
+ ipa_record_reference ((symtab_node *)data, rvnode,
+ IPA_REF_ADDR, stmt);
+ }
}
return false;
@@ -272,6 +483,15 @@ mark_load (gimple stmt, tree t, tree, void *data)
ipa_record_reference ((symtab_node *)data,
vnode,
IPA_REF_LOAD, stmt);
+
+ if (L_IPO_COMP_MODE && cgraph_pre_profiling_inlining_done)
+ {
+ struct varpool_node *rvnode = real_varpool_node (t);
+ if (rvnode != vnode)
+ ipa_record_reference ((symtab_node *)data,
+ rvnode,
+ IPA_REF_ADDR, stmt);
+ }
}
return false;
}
@@ -290,6 +510,14 @@ mark_store (gimple stmt, tree t, tree, void *data)
ipa_record_reference ((symtab_node *)data,
vnode,
IPA_REF_STORE, stmt);
+ if (L_IPO_COMP_MODE && cgraph_pre_profiling_inlining_done)
+ {
+ struct varpool_node *rvnode = real_varpool_node (t);
+ if (rvnode != vnode)
+ ipa_record_reference ((symtab_node *)data,
+ rvnode,
+ IPA_REF_ADDR, stmt);
+ }
}
return false;
}
@@ -369,6 +597,7 @@ build_cgraph_edges (void)
ipa_record_stmt_references (node, gsi_stmt (gsi));
}
+
/* Look for initializers of constant variables and private statics. */
FOR_EACH_LOCAL_DECL (cfun, ix, decl)
if (TREE_CODE (decl) == VAR_DECL
@@ -436,6 +665,144 @@ record_references_in_initializer (tree decl, bool only_vars)
pointer_set_destroy (visited_nodes);
}
+/* Update any function decl references in base ADDR of operand OP to refer to
+ the resolved node. */
+
+static bool
+fixup_ref (gimple, tree addr, tree op)
+{
+ addr = get_base_address (addr);
+ if (addr && TREE_CODE (addr) == FUNCTION_DECL)
+ {
+ gcc_assert (TREE_CODE (op) == ADDR_EXPR);
+ gcc_assert (TREE_OPERAND (op,0) == addr);
+ struct cgraph_node *real_callee;
+ real_callee = cgraph_lipo_get_resolved_node (addr);
+ if (addr == real_callee->decl)
+ return false;
+ TREE_OPERAND (op,0) = real_callee->decl;
+ }
+ return false;
+}
+
+/* Update any function decl references in base ADDR of operand OP from address
+ STMT operand OP to refer to the resolved node. */
+
+static bool
+fixup_address (gimple stmt, tree addr, tree op, void *)
+{
+ return fixup_ref (stmt, addr, op);
+}
+
+/* Update any function decl references in base ADDR of operand OP from load
+ STMT operand OP to refer to the resolved node. See comments in mark_load
+ on when a load may have a function decl reference. */
+
+static bool
+fixup_load (gimple stmt, tree addr, tree op, void *)
+{
+ return fixup_ref (stmt, addr, op);
+}
+
+/* After the LIPO link, references to function decls should be updated
+ to the resolved node, so that the correct references are added to the
+ cgraph. Update all references in STMT. */
+
+void
+lipo_fixup_load_addr_ops (gimple stmt)
+{
+ walk_stmt_load_store_addr_ops (stmt, NULL, fixup_load, NULL,
+ fixup_address);
+}
+
+/* In LIPO mode, before tree_profiling, the call graph edge
+ needs to be built with the original target node to make
+ sure consistent early inline decisions between profile
+ generate and profile use. After tree-profiling, the target
+ needs to be set to the resolved node so that ipa-inline
+ sees the definitions. */
+#include "gimple-pretty-print.h"
+static void
+lipo_fixup_cgraph_edge_call_target (gimple stmt)
+{
+ tree decl;
+ gcc_assert (is_gimple_call (stmt));
+
+ decl = gimple_call_fndecl (stmt);
+ if (decl)
+ {
+ struct cgraph_node *real_callee;
+ real_callee = cgraph_lipo_get_resolved_node (decl);
+
+ if (decl != real_callee->decl)
+ {
+ int lp_nr;
+
+ gcc_assert (!real_callee->clone.combined_args_to_skip);
+ gimple_call_set_fndecl (stmt, real_callee->decl);
+ update_stmt (stmt);
+ lp_nr = lookup_stmt_eh_lp (stmt);
+ if (lp_nr != 0 && !stmt_could_throw_p (stmt))
+ remove_stmt_from_eh_lp (stmt);
+ }
+ }
+}
+
+/* Link the cgraph nodes, varpool nodes and fixup the call target to
+ the correct decl. Remove dead functions. */
+
+
+void
+lipo_link_and_fixup ()
+{
+ struct cgraph_node *node;
+
+ cgraph_pre_profiling_inlining_done = true;
+ cgraph_process_module_scope_statics ();
+ /* Now perform link to allow cross module inlining. */
+ cgraph_do_link ();
+ varpool_do_link ();
+ cgraph_unify_type_alias_sets ();
+ cgraph_init_gid_map ();
+
+ FOR_EACH_DEFINED_FUNCTION (node)
+ {
+ if (!gimple_has_body_p (node->decl))
+ continue;
+
+ /* Don't profile functions produced for builtin stuff. */
+ if (DECL_SOURCE_LOCATION (node->decl) == BUILTINS_LOCATION)
+ continue;
+
+ push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+
+ if (L_IPO_COMP_MODE)
+ {
+ basic_block bb;
+ FOR_EACH_BB_FN (bb, cfun)
+ {
+ gimple_stmt_iterator gsi;
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+ if (is_gimple_call (stmt))
+ lipo_fixup_cgraph_edge_call_target (stmt);
+ lipo_fixup_load_addr_ops (stmt);
+ }
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ lipo_fixup_load_addr_ops (gsi_stmt (gsi));
+ }
+ update_ssa (TODO_update_ssa);
+ }
+ rebuild_cgraph_edges ();
+ pop_cfun ();
+ }
+
+ cgraph_add_fake_indirect_call_edges ();
+ symtab_remove_unreachable_nodes (true, dump_file);
+}
+
+
/* Rebuild cgraph edges for current function node. This needs to be run after
passes that don't update the cgraph. */
@@ -450,9 +817,12 @@ rebuild_cgraph_edges (void)
ipa_remove_all_references (&node->ref_list);
node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
+ node->max_bb_count = 0;
FOR_EACH_BB_FN (bb, cfun)
{
+ if (bb->count > node->max_bb_count)
+ node->max_bb_count = bb->count;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
@@ -464,8 +834,13 @@ rebuild_cgraph_edges (void)
bb);
decl = gimple_call_fndecl (stmt);
if (decl)
- cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
- bb->count, freq);
+ {
+ struct cgraph_node *callee = cgraph_get_create_node (decl);
+ if (L_IPO_COMP_MODE)
+ record_reference_to_real_target_from_alias (callee);
+ cgraph_create_edge (node, callee, stmt,
+ bb->count, freq);
+ }
else if (gimple_call_internal_p (stmt))
;
else
@@ -478,6 +853,9 @@ rebuild_cgraph_edges (void)
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
ipa_record_stmt_references (node, gsi_stmt (gsi));
}
+
+ if (!cgraph_pre_profiling_inlining_done)
+ add_fake_indirect_call_edges (node);
record_eh_tables (node, cfun);
gcc_assert (!node->global.inlined_to);
@@ -547,6 +925,9 @@ public:
} // anon namespace
+/* Defined in passes.c */
+extern bool cgraph_callee_edges_final_cleanup;
+
gimple_opt_pass *
make_pass_rebuild_cgraph_edges (gcc::context *ctxt)
{
@@ -557,6 +938,12 @@ make_pass_rebuild_cgraph_edges (gcc::context *ctxt)
static unsigned int
remove_cgraph_callee_edges (void)
{
+ /* The -freorder-functions=* needs the call-graph preserved till
+ pass_final. */
+ if (cgraph_callee_edges_final_cleanup
+ && (flag_reorder_functions > 1))
+ return 0;
+
struct cgraph_node *node = cgraph_get_node (current_function_decl);
cgraph_node_remove_callees (node);
ipa_remove_all_references (&node->ref_list);