cgraph.c (cgraph_add_thunk): Create real function node instead of alias node

2011-05-06  Jan Hubicka  <jh@suse.cz>

	* cgraph.c (cgraph_add_thunk): Create real function node instead
	of alias node; finalize it and mark needed/reachable; arrange
	visibility to be right and add it into the corresponding same
	comdat group list.
	(dump_cgraph_node): Dump thunks.
	* cgraph.h (cgraph_first_defined_function, cgraph_next_defined_function,
	cgraph_function_with_gimple_body_p, cgraph_first_function_with_gimple_body,
	cgraph_next_function_with_gimple_body): New functions.
	(FOR_EACH_FUNCTION_WITH_GIMPLE_BODY, FOR_EACH_DEFINED_FUNCTION):
	New macros.
	* ipa-cp.c (ipcp_need_redirect_p): Thunks can't be redirected.
	(ipcp_generate_summary): Use FOR_EACH_FUNCTION_WITH_GIMPLE_BODY.
	* cgraphunit.c (cgraph_finalize_function): Only look into possible
	devirtualization when optimizing.
	(verify_cgraph_node): Verify thunks.
	(cgraph_analyze_function): Analyze thunks.
	(cgraph_mark_functions_to_output): Output thunks only in combination
	with the function they are assigned to.
	(assemble_thunk): Turn thunk into non-thunk; don't try to turn
	alias into normal node.
	(assemble_thunks): New function.
	(cgraph_expand_function): Use it.
	* lto-cgraph.c (lto_output_node): Stream thunks.
	(input_overwrite_node): Stream in thunks.
	* ipa-pure-const.c (analyze_function): Thunks do nothing interesting.
	* lto-streamer-out.c (lto_output): Do not try to output thunk's body.
	* ipa-inline.c (inline_small_functions): Use FOR_EACH_DEFINED_FUNCTION.
	* ipa-inline-analysis.c (compute_inline_parameters): "Analyze" thunks.
	(inline_analyze_function): Do not care about thunk jump functions.
	(inline_generate_summary): Use FOR_EACH_DEFINED_FUNCTION.
	* ipa-prop.c (ipa_prop_write_jump_functions): Use
	cgraph_function_with_gimple_body_p.
	* passes.c (do_per_function_toporder): Use
	cgraph_function_with_gimple_body_p.
	(execute_one_pass): Use FOR_EACH_FUNCTION_WITH_GIMPLE_BODY.
	(ipa_write_summaries): Use cgraph_function_with_gimple_body_p.
	(function_called_by_processed_nodes_p): Likewise.
	* lto.c (lto_materialize_function): Use
	cgraph_function_with_gimple_body_p.
	(add_cgraph_node_to_partition): Do not re-add items to partition;
	handle thunks.
	(add_varpool_node_to_partition): Do not re-add items to partition.

From-SVN: r173517
parent e68dde1f40
commit c47d00347d
15 changed files with 384 additions and 122 deletions
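For context on what this commit models: a thunk is a tiny forwarding function that adjusts its incoming "this" pointer by a constant (fixed_offset), possibly through a vtable slot (virtual_value / virtual_offset_p), and then transfers control to the real function (thunk.alias). Below is a minimal, self-contained C sketch of the fixed-offset case only; the struct and function names (base, derived, real_method, thunk_method) are invented for illustration and are not GCC code.

#include <stddef.h>
#include <stdio.h>

struct base    { int x; };
struct derived { int y; struct base b; };  /* base subobject lives at a nonzero offset */

/* The real function; it expects a pointer to the whole object.  */
static void real_method (struct derived *self)
{
  printf ("y = %d\n", self->y);
}

/* A this-adjusting thunk: callers hand it a pointer to the embedded
   'base' subobject; it applies the constant offset (the analogue of
   cgraph_add_thunk's fixed_offset) and tail-calls the real function.  */
static void thunk_method (struct base *adjusted_this)
{
  real_method ((struct derived *) ((char *) adjusted_this
                                   - offsetof (struct derived, b)));
}

int main (void)
{
  struct derived d = { 7, { 0 } };
  thunk_method (&d.b);  /* prints "y = 7" by way of the thunk */
  return 0;
}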
gcc/ChangeLog

@@ -1,3 +1,40 @@
2011-05-06  Jan Hubicka  <jh@suse.cz>

	* cgraph.c (cgraph_add_thunk): Create real function node instead
	of alias node; finalize it and mark needed/reachable; arrange visibility
	to be right and add it into the corresponding same comdat group list.
	(dump_cgraph_node): Dump thunks.
	* cgraph.h (cgraph_first_defined_function, cgraph_next_defined_function,
	cgraph_function_with_gimple_body_p, cgraph_first_function_with_gimple_body,
	cgraph_next_function_with_gimple_body): New functions.
	(FOR_EACH_FUNCTION_WITH_GIMPLE_BODY, FOR_EACH_DEFINED_FUNCTION):
	New macros.
	* ipa-cp.c (ipcp_need_redirect_p): Thunks can't be redirected.
	(ipcp_generate_summary): Use FOR_EACH_FUNCTION_WITH_GIMPLE_BODY.
	* cgraphunit.c (cgraph_finalize_function): Only look into possible
	devirtualization when optimizing.
	(verify_cgraph_node): Verify thunks.
	(cgraph_analyze_function): Analyze thunks.
	(cgraph_mark_functions_to_output): Output thunks only in combination
	with the function they are assigned to.
	(assemble_thunk): Turn thunk into non-thunk; don't try to turn
	alias into normal node.
	(assemble_thunks): New function.
	(cgraph_expand_function): Use it.
	* lto-cgraph.c (lto_output_node): Stream thunks.
	(input_overwrite_node): Stream in thunks.
	* ipa-pure-const.c (analyze_function): Thunks do nothing interesting.
	* lto-streamer-out.c (lto_output): Do not try to output thunk's body.
	* ipa-inline.c (inline_small_functions): Use FOR_EACH_DEFINED_FUNCTION.
	* ipa-inline-analysis.c (compute_inline_parameters): "Analyze" thunks.
	(inline_analyze_function): Do not care about thunk jump functions.
	(inline_generate_summary): Use FOR_EACH_DEFINED_FUNCTION.
	* ipa-prop.c (ipa_prop_write_jump_functions): Use cgraph_function_with_gimple_body_p.
	* passes.c (do_per_function_toporder): Use cgraph_function_with_gimple_body_p.
	(execute_one_pass): Use FOR_EACH_FUNCTION_WITH_GIMPLE_BODY.
	(ipa_write_summaries): Use cgraph_function_with_gimple_body_p.
	(function_called_by_processed_nodes_p): Likewise.

2011-05-06  Joseph Myers  <joseph@codesourcery.com>

	* config/rs6000/rs6000.opt (rs6000_ieeequad, rs6000_altivec_abi,
gcc/cgraph.c (46)

@@ -595,14 +595,16 @@ cgraph_same_body_alias (struct cgraph_node *decl_node, tree alias, tree decl)
   See comments in thunk_adjust for detail on the parameters. */

struct cgraph_node *
cgraph_add_thunk (struct cgraph_node *decl_node, tree alias, tree decl,
cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
		  tree alias, tree decl,
		  bool this_adjusting,
		  HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
		  tree virtual_offset,
		  tree real_alias)
{
  struct cgraph_node *node = cgraph_get_node (alias);
  struct cgraph_node *node;

  node = cgraph_get_node (alias);
  if (node)
    {
      gcc_assert (node->local.finalized);
@@ -610,8 +612,7 @@ cgraph_add_thunk (struct cgraph_node *decl_node, tree alias, tree decl,
      cgraph_remove_node (node);
    }

  node = cgraph_same_body_alias_1 (decl_node, alias, decl);
  gcc_assert (node);
  node = cgraph_create_node (alias);
  gcc_checking_assert (!virtual_offset
		       || double_int_equal_p
			  (tree_to_double_int (virtual_offset),
@@ -622,6 +623,15 @@ cgraph_add_thunk (struct cgraph_node *decl_node, tree alias, tree decl,
  node->thunk.virtual_offset_p = virtual_offset != NULL;
  node->thunk.alias = real_alias;
  node->thunk.thunk_p = true;
  node->local.finalized = true;

  if (cgraph_decide_is_function_needed (node, decl))
    cgraph_mark_needed_node (node);

  if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
      || (DECL_VIRTUAL_P (decl)
	  && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
    cgraph_mark_reachable_node (node);
  return node;
}

@@ -1875,7 +1885,21 @@ dump_cgraph_node (FILE *f, struct cgraph_node *node)
  if (node->only_called_at_exit)
    fprintf (f, " only_called_at_exit");

  fprintf (f, "\n called by: ");
  fprintf (f, "\n");

  if (node->thunk.thunk_p)
    {
      fprintf (f, " thunk of %s (asm: %s) fixed offset %i virtual value %i has "
	       "virtual offset %i)\n",
	       lang_hooks.decl_printable_name (node->thunk.alias, 2),
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)),
	       (int)node->thunk.fixed_offset,
	       (int)node->thunk.virtual_value,
	       (int)node->thunk.virtual_offset_p);
    }

  fprintf (f, " called by: ");

  for (edge = node->callers; edge; edge = edge->next_caller)
    {
      fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
@@ -1927,20 +1951,10 @@ dump_cgraph_node (FILE *f, struct cgraph_node *node)
  if (node->same_body)
    {
      struct cgraph_node *n;
      fprintf (f, " aliases & thunks:");
      fprintf (f, " aliases:");
      for (n = node->same_body; n; n = n->next)
	{
	  fprintf (f, " %s/%i", cgraph_node_name (n), n->uid);
	  if (n->thunk.thunk_p)
	    {
	      fprintf (f, " (thunk of %s fixed offset %i virtual value %i has "
		       "virtual offset %i",
		       lang_hooks.decl_printable_name (n->thunk.alias, 2),
		       (int)n->thunk.fixed_offset,
		       (int)n->thunk.virtual_value,
		       (int)n->thunk.virtual_offset_p);
	      fprintf (f, ")");
	    }
	  if (DECL_ASSEMBLER_NAME_SET_P (n->decl))
	    fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (n->decl)));
	}
gcc/cgraph.h (73)

@@ -695,6 +695,79 @@ varpool_next_static_initializer (struct varpool_node *node)
   for ((node) = varpool_first_static_initializer (); (node); \
	(node) = varpool_next_static_initializer (node))

/* Return first function with body defined. */
static inline struct cgraph_node *
cgraph_first_defined_function (void)
{
  struct cgraph_node *node;
  for (node = cgraph_nodes; node; node = node->next)
    {
      if (node->analyzed)
	return node;
    }
  return NULL;
}

/* Return next reachable static variable with initializer after NODE. */
static inline struct cgraph_node *
cgraph_next_defined_function (struct cgraph_node *node)
{
  for (node = node->next; node; node = node->next)
    {
      if (node->analyzed)
	return node;
    }
  return NULL;
}

/* Walk all functions with body defined. */
#define FOR_EACH_DEFINED_FUNCTION(node) \
   for ((node) = cgraph_first_defined_function (); (node); \
	(node) = cgraph_next_defined_function (node))


/* Return true when NODE is a function with Gimple body defined
   in current unit. Functions can also be define externally or they
   can be thunks with no Gimple representation.

   Note that at WPA stage, the function body may not be present in memory. */

static inline bool
cgraph_function_with_gimple_body_p (struct cgraph_node *node)
{
  return node->analyzed && !node->thunk.thunk_p;
}

/* Return first function with body defined. */
static inline struct cgraph_node *
cgraph_first_function_with_gimple_body (void)
{
  struct cgraph_node *node;
  for (node = cgraph_nodes; node; node = node->next)
    {
      if (cgraph_function_with_gimple_body_p (node))
	return node;
    }
  return NULL;
}

/* Return next reachable static variable with initializer after NODE. */
static inline struct cgraph_node *
cgraph_next_function_with_gimple_body (struct cgraph_node *node)
{
  for (node = node->next; node; node = node->next)
    {
      if (cgraph_function_with_gimple_body_p (node))
	return node;
    }
  return NULL;
}

/* Walk all functions with body defined. */
#define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node) \
   for ((node) = cgraph_first_function_with_gimple_body (); (node); \
	(node) = cgraph_next_function_with_gimple_body (node))

/* Create a new static variable of type TYPE. */
tree add_new_static_var (tree type);
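The new iterators above all follow one shape: a predicate, a "first" walker, a "next" walker, and a FOR_EACH macro tying them together. The sketch below reproduces that shape on a mock node list so the pattern can be compiled and run on its own; mock_node, its fields, and the helper names are invented stand-ins for GCC's cgraph_node and cgraph_nodes, not real GCC declarations.

#include <stdbool.h>
#include <stdio.h>

struct mock_node
{
  const char *name;
  bool analyzed;
  bool thunk_p;
  struct mock_node *next;
};

/* Same test cgraph_function_with_gimple_body_p performs: analyzed and not a thunk.  */
static bool
with_gimple_body_p (struct mock_node *node)
{
  return node->analyzed && !node->thunk_p;
}

static struct mock_node *
first_with_gimple_body (struct mock_node *head)
{
  struct mock_node *node;
  for (node = head; node; node = node->next)
    if (with_gimple_body_p (node))
      return node;
  return NULL;
}

static struct mock_node *
next_with_gimple_body (struct mock_node *node)
{
  for (node = node->next; node; node = node->next)
    if (with_gimple_body_p (node))
      return node;
  return NULL;
}

#define FOR_EACH_WITH_GIMPLE_BODY(head, node) \
  for ((node) = first_with_gimple_body (head); (node); \
       (node) = next_with_gimple_body (node))

int main (void)
{
  struct mock_node c = { "c", true, true, NULL };   /* a thunk: skipped */
  struct mock_node b = { "b", false, false, &c };   /* not analyzed: skipped */
  struct mock_node a = { "a", true, false, &b };    /* visited */
  struct mock_node *node;

  FOR_EACH_WITH_GIMPLE_BODY (&a, node)
    printf ("%s\n", node->name);   /* prints only "a" */
  return 0;
}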
gcc/cgraphunit.c (122)

@@ -370,7 +370,8 @@ cgraph_finalize_function (tree decl, bool nested)
	 to those so we need to analyze them.
	 FIXME: We should introduce may edges for this purpose and update
	 their handling in unreachable function removal and inliner too. */
      || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
      || (DECL_VIRTUAL_P (decl)
	  && optimize && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
    cgraph_mark_reachable_node (node);

  /* If we've not yet emitted decl, tell the debug info about it. */

@@ -624,10 +625,28 @@ verify_cgraph_node (struct cgraph_node *node)
      while (n != node);
    }

  if (node->analyzed && gimple_has_body_p (node->decl)
      && !TREE_ASM_WRITTEN (node->decl)
      && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
      && !flag_wpa)
  if (node->analyzed && node->thunk.thunk_p)
    {
      if (!node->callees)
	{
	  error ("No edge out of thunk node");
	  error_found = true;
	}
      else if (node->callees->next_callee)
	{
	  error ("More than one edge out of thunk node");
	  error_found = true;
	}
      if (gimple_has_body_p (node->decl))
	{
	  error ("Thunk is not supposed to have body");
	  error_found = true;
	}
    }
  else if (node->analyzed && gimple_has_body_p (node->decl)
	   && !TREE_ASM_WRITTEN (node->decl)
	   && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
	   && !flag_wpa)
    {
      if (this_cfun->cfg)
	{

@@ -656,8 +675,6 @@ verify_cgraph_node (struct cgraph_node *node)
		    }
		  if (!e->indirect_unknown_callee)
		    {
		      struct cgraph_node *n;

		      if (e->callee->same_body_alias)
			{
			  error ("edge points to same body alias:");

@@ -678,16 +695,6 @@ verify_cgraph_node (struct cgraph_node *node)
			  debug_tree (decl);
			  error_found = true;
			}
		      else if (decl
			       && (n = cgraph_get_node_or_alias (decl))
			       && (n->same_body_alias
				   && n->thunk.thunk_p))
			{
			  error ("a call to thunk improperly represented "
				 "in the call graph:");
			  cgraph_debug_gimple_stmt (this_cfun, stmt);
			  error_found = true;
			}
		    }
		  else if (decl)
		    {

@@ -780,23 +787,31 @@ cgraph_analyze_function (struct cgraph_node *node)
  tree save = current_function_decl;
  tree decl = node->decl;

  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));
  if (node->thunk.thunk_p)
    {
      cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
			  NULL, 0, CGRAPH_FREQ_BASE);
    }
  else
    {
      current_function_decl = decl;
      push_cfun (DECL_STRUCT_FUNCTION (decl));

  assign_assembler_name_if_neeeded (node->decl);
      assign_assembler_name_if_neeeded (node->decl);

  /* Make sure to gimplify bodies only once. During analyzing a
     function we lower it, which will require gimplified nested
     functions, so we can end up here with an already gimplified
     body. */
  if (!gimple_body (decl))
    gimplify_function_tree (decl);
  dump_function (TDI_generic, decl);
      /* Make sure to gimplify bodies only once. During analyzing a
	 function we lower it, which will require gimplified nested
	 functions, so we can end up here with an already gimplified
	 body. */
      if (!gimple_body (decl))
	gimplify_function_tree (decl);
      dump_function (TDI_generic, decl);

  cgraph_lower_function (node);
      cgraph_lower_function (node);
      pop_cfun ();
    }
  node->analyzed = true;

  pop_cfun ();
  current_function_decl = save;
}

@@ -969,7 +984,8 @@ cgraph_analyze_functions (void)
	 /* ??? It is possible to create extern inline function and later using
	    weak alias attribute to kill its body. See
	    gcc.c-torture/compile/20011119-1.c */
      if (!DECL_STRUCT_FUNCTION (decl))
      if (!DECL_STRUCT_FUNCTION (decl)
	  && !node->thunk.thunk_p)
	{
	  cgraph_reset_node (node);
	  continue;

@@ -981,6 +997,9 @@ cgraph_analyze_functions (void)
      for (edge = node->callees; edge; edge = edge->next_callee)
	if (!edge->callee->reachable)
	  cgraph_mark_reachable_node (edge->callee);
      for (edge = node->callers; edge; edge = edge->next_caller)
	if (!edge->caller->reachable && edge->caller->thunk.thunk_p)
	  cgraph_mark_reachable_node (edge->caller);

      if (node->same_comdat_group)
	{

@@ -1031,10 +1050,12 @@ cgraph_analyze_functions (void)
      tree decl = node->decl;
      next = node->next;

      if (node->local.finalized && !gimple_has_body_p (decl))
      if (node->local.finalized && !gimple_has_body_p (decl)
	  && !node->thunk.thunk_p)
	cgraph_reset_node (node);

      if (!node->reachable && gimple_has_body_p (decl))
      if (!node->reachable
	  && (gimple_has_body_p (decl) || node->thunk.thunk_p))
	{
	  if (cgraph_dump_file)
	    fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));

@@ -1043,7 +1064,8 @@ cgraph_analyze_functions (void)
	}
      else
	node->next_needed = NULL;
      gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
      gcc_assert (!node->local.finalized || node->thunk.thunk_p
		  || gimple_has_body_p (decl));
      gcc_assert (node->analyzed == node->local.finalized);
    }
  if (cgraph_dump_file)

@@ -1132,6 +1154,7 @@ cgraph_mark_functions_to_output (void)
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit. */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->global.inlined_to
	  && (!cgraph_only_called_directly_p (node)
	      || (e && node->reachable))

@@ -1145,7 +1168,8 @@ cgraph_mark_functions_to_output (void)
	    for (next = node->same_comdat_group;
		 next != node;
		 next = next->same_comdat_group)
	      next->process = 1;
	      if (!next->thunk.thunk_p)
		next->process = 1;
	    }
	}
      else if (node->same_comdat_group)

@@ -1406,6 +1430,8 @@ assemble_thunk (struct cgraph_node *node)
      free_after_compilation (cfun);
      set_cfun (NULL);
      TREE_ASM_WRITTEN (thunk_fndecl) = 1;
      node->thunk.thunk_p = false;
      node->analyzed = false;
    }
  else
    {

@@ -1530,15 +1556,36 @@ assemble_thunk (struct cgraph_node *node)
      delete_unreachable_blocks ();
      update_ssa (TODO_update_ssa);

      cgraph_remove_same_body_alias (node);
      /* Since we want to emit the thunk, we explicitly mark its name as
	 referenced. */
      node->thunk.thunk_p = false;
      cgraph_node_remove_callees (node);
      cgraph_add_new_function (thunk_fndecl, true);
      bitmap_obstack_release (NULL);
    }
  current_function_decl = NULL;
}


/* Assemble thunks asociated to NODE. */

static void
assemble_thunks (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callers; e;)
    if (e->caller->thunk.thunk_p)
      {
	struct cgraph_node *thunk = e->caller;

	e = e->next_caller;
	assemble_thunks (thunk);
	assemble_thunk (thunk);
      }
    else
      e = e->next_caller;
}

/* Expand function specified by NODE. */

static void

@@ -1566,13 +1613,12 @@ cgraph_expand_function (struct cgraph_node *node)
	  if (!alias->thunk.thunk_p)
	    assemble_alias (alias->decl,
			    DECL_ASSEMBLER_NAME (alias->thunk.alias));
	  else
	    assemble_thunk (alias);
	}
      node->alias = saved_alias;
      cgraph_process_new_functions ();
    }

  assemble_thunks (node);
  gcc_assert (node->lowered);

  /* Generate RTL for the body of DECL. */

@@ -1688,7 +1734,7 @@ cgraph_output_in_order (void)

  for (pf = cgraph_nodes; pf; pf = pf->next)
    {
      if (pf->process)
      if (pf->process && !pf->thunk.thunk_p)
	{
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
gcc/ipa-cp.c

@@ -951,6 +951,10 @@ ipcp_need_redirect_p (struct cgraph_edge *cs)
  if (!n_cloning_candidates)
    return false;

  /* We can't redirect anything in thunks, yet. */
  if (cs->caller->thunk.thunk_p)
    return true;

  if ((orig = ipcp_get_orig_node (node)) != NULL)
    node = orig;
  if (ipcp_get_orig_node (cs->caller))

@@ -1508,8 +1512,9 @@ ipcp_generate_summary (void)
    fprintf (dump_file, "\nIPA constant propagation start:\n");
  ipa_register_cgraph_hooks ();

  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed)
  /* FIXME: We could propagate through thunks happily and we could be
     even able to clone them, if needed. Do that later. */
  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      /* Unreachable nodes should have been eliminated before ipcp. */
      gcc_assert (node->needed || node->reachable);
gcc/ipa-inline-analysis.c

@@ -1443,6 +1443,23 @@ compute_inline_parameters (struct cgraph_node *node, bool early)

  info = inline_summary (node);

  /* FIXME: Thunks are inlinable, but tree-inline don't know how to do that.
     Once this happen, we will need to more curefully predict call
     statement size. */
  if (node->thunk.thunk_p)
    {
      struct inline_edge_summary *es = inline_edge_summary (node->callees);
      struct predicate t = true_predicate ();

      info->inlinable = info->versionable = 0;
      node->callees->call_stmt_cannot_inline_p = true;
      node->local.can_change_signature = false;
      es->call_stmt_time = 1;
      es->call_stmt_size = 1;
      account_size_time (info, 0, 0, &t);
      return;
    }

  /* Estimate the stack size for the function if we're optimizing. */
  self_stack_size = optimize ? estimated_stack_frame_size (node) : 0;
  info->estimated_self_stack_size = self_stack_size;

@@ -2027,7 +2044,7 @@ inline_analyze_function (struct cgraph_node *node)
	     cgraph_node_name (node), node->uid);
  /* FIXME: We should remove the optimize check after we ensure we never run
     IPA passes when not optimizing. */
  if (flag_indirect_inlining && optimize)
  if (flag_indirect_inlining && optimize && !node->thunk.thunk_p)
    inline_indirect_intraprocedural_analysis (node);
  compute_inline_parameters (node, false);

@@ -2058,8 +2075,7 @@ inline_generate_summary (void)
  if (flag_indirect_inlining)
    ipa_register_cgraph_hooks ();

  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed)
  FOR_EACH_DEFINED_FUNCTION (node)
    inline_analyze_function (node);
}
gcc/ipa-inline.c

@@ -1235,9 +1235,8 @@ inline_small_functions (void)
  max_count = 0;
  initialize_growth_caches ();

  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed
	&& !node->global.inlined_to)
  FOR_EACH_DEFINED_FUNCTION (node)
    if (!node->global.inlined_to)
      {
	struct inline_summary *info = inline_summary (node);

@@ -1255,9 +1254,8 @@ inline_small_functions (void)

  /* Populate the heeap with all edges we might inline. */

  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed
	&& !node->global.inlined_to)
  FOR_EACH_DEFINED_FUNCTION (node)
    if (!node->global.inlined_to)
      {
	if (dump_file)
	  fprintf (dump_file, "Enqueueing calls of %s/%i.\n",
gcc/ipa-prop.c

@@ -2888,7 +2888,8 @@ ipa_prop_write_jump_functions (cgraph_node_set set)
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && IPA_NODE_REF (node) != NULL)
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

@@ -2898,7 +2899,8 @@ ipa_prop_write_jump_functions (cgraph_node_set set)
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
    {
      node = csi_node (csi);
      if (node->analyzed && IPA_NODE_REF (node) != NULL)
      if (cgraph_function_with_gimple_body_p (node)
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  lto_output_1_stream (ob->main_stream, 0);
gcc/ipa-pure-const.c

@@ -731,6 +731,16 @@ analyze_function (struct cgraph_node *fn, bool ipa)
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    cgraph_node_cannot_return (fn));

  if (fn->thunk.thunk_p)
    {
      /* Thunk gets propagated through, so nothing interesting happens. */
      gcc_assert (ipa);
      return l;
    }

  if (dump_file)
    {

@@ -799,9 +809,6 @@ end:

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " checking previously known:");
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    cgraph_node_cannot_return (fn));

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
gcc/ipa.c (40)

@@ -877,7 +877,47 @@ function_and_variable_visibility (bool whole_program)
	     segfault though. */
	  dissolve_same_comdat_group_list (node);
	}

      if (node->thunk.thunk_p
	  && TREE_PUBLIC (node->decl))
	{
	  struct cgraph_node *decl_node = node;

	  while (decl_node->thunk.thunk_p)
	    decl_node = decl_node->callees->callee;

	  /* Thunks have the same visibility as function they are attached to.
	     For some reason C++ frontend don't seem to care. I.e. in
	     g++.dg/torture/pr41257-2.C the thunk is not comdat while function
	     it is attached to is.

	     We also need to arrange the thunk into the same comdat group as
	     the function it reffers to. */
	  if (DECL_COMDAT (decl_node->decl))
	    {
	      DECL_COMDAT (node->decl) = 1;
	      DECL_COMDAT_GROUP (node->decl) = DECL_COMDAT_GROUP (decl_node->decl);
	      if (!node->same_comdat_group)
		{
		  node->same_comdat_group = decl_node;
		  if (!decl_node->same_comdat_group)
		    decl_node->same_comdat_group = node;
		  else
		    {
		      struct cgraph_node *n;
		      for (n = decl_node->same_comdat_group;
			   n->same_comdat_group != decl_node;
			   n = n->same_comdat_group)
			;
		      n->same_comdat_group = node;
		    }
		}
	    }
	  if (DECL_EXTERNAL (decl_node->decl))
	    DECL_EXTERNAL (node->decl) = 1;
	}
      node->local.local = cgraph_local_node_p (node);

    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
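The comdat handling above links a thunk into the circular, singly linked same_comdat_group ring of the function it refers to. The standalone sketch below replays that list manipulation with a mock structure so the splice and the ring walk can be tested in isolation; mock_node and link_into_comdat_group are invented names, not GCC code.

#include <stdio.h>

struct mock_node
{
  const char *name;
  struct mock_node *same_comdat_group;   /* NULL, or next element of the ring */
};

/* Link NODE into the ring DECL_NODE belongs to, creating the ring if
   needed; mirrors the splice the ipa.c hunk performs.  */
static void
link_into_comdat_group (struct mock_node *node, struct mock_node *decl_node)
{
  if (node->same_comdat_group)
    return;                               /* already a member of some ring */
  node->same_comdat_group = decl_node;
  if (!decl_node->same_comdat_group)
    decl_node->same_comdat_group = node;  /* start a two-element ring */
  else
    {
      /* Find the element that currently points back to DECL_NODE and
	 splice NODE in front of it.  */
      struct mock_node *n;
      for (n = decl_node->same_comdat_group;
	   n->same_comdat_group != decl_node;
	   n = n->same_comdat_group)
	;
      n->same_comdat_group = node;
    }
}

int main (void)
{
  struct mock_node fn = { "function", NULL };
  struct mock_node thunk1 = { "thunk1", NULL };
  struct mock_node thunk2 = { "thunk2", NULL };
  struct mock_node *n;

  link_into_comdat_group (&thunk1, &fn);
  link_into_comdat_group (&thunk2, &fn);

  /* Walk the ring starting at FN; prints function, thunk1, thunk2.  */
  n = &fn;
  do
    {
      printf ("%s\n", n->name);
      n = n->same_comdat_group;
    }
  while (n != &fn);
  return 0;
}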
gcc/lto-cgraph.c

@@ -502,9 +502,24 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
  bp_pack_value (&bp, node->frequency, 2);
  bp_pack_value (&bp, node->only_called_at_startup, 1);
  bp_pack_value (&bp, node->only_called_at_exit, 1);
  bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
  lto_output_bitpack (&bp);
  lto_output_uleb128_stream (ob->main_stream, node->resolution);

  if (node->thunk.thunk_p && !boundary_p)
    {
      lto_output_uleb128_stream
	 (ob->main_stream,
	  1 + (node->thunk.this_adjusting != 0) * 2
	  + (node->thunk.virtual_offset_p != 0) * 4);
      lto_output_uleb128_stream (ob->main_stream,
				 node->thunk.fixed_offset);
      lto_output_uleb128_stream (ob->main_stream,
				 node->thunk.virtual_value);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
				node->thunk.alias);
    }

  if (node->same_body)
    {
      struct cgraph_node *alias;

@@ -516,25 +531,8 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
	{
	  lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
				    alias->decl);
	  if (alias->thunk.thunk_p)
	    {
	      lto_output_uleb128_stream
		 (ob->main_stream,
		  1 + (alias->thunk.this_adjusting != 0) * 2
		  + (alias->thunk.virtual_offset_p != 0) * 4);
	      lto_output_uleb128_stream (ob->main_stream,
					 alias->thunk.fixed_offset);
	      lto_output_uleb128_stream (ob->main_stream,
					 alias->thunk.virtual_value);
	      lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
					alias->thunk.alias);
	    }
	  else
	    {
	      lto_output_uleb128_stream (ob->main_stream, 0);
	      lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
					alias->thunk.alias);
	    }
	  lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
				    alias->thunk.alias);
	  gcc_assert (cgraph_get_node (alias->thunk.alias) == node);
	  lto_output_uleb128_stream (ob->main_stream, alias->resolution);
	  alias = alias->previous;

@@ -947,6 +945,7 @@ input_overwrite_node (struct lto_file_decl_data *file_data,
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->resolution = resolution;
}

@@ -1031,34 +1030,33 @@ input_node (struct lto_file_decl_data *file_data,
  /* Store a reference for now, and fix up later to be a pointer. */
  node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;

  if (node->thunk.thunk_p)
    {
      int type = lto_input_uleb128 (ib);
      HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
      HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
      tree real_alias;

      decl_index = lto_input_uleb128 (ib);
      real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
      node->thunk.fixed_offset = fixed_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_value = virtual_value;
      node->thunk.virtual_offset_p = (type & 4);
      node->thunk.alias = real_alias;
    }

  same_body_count = lto_input_uleb128 (ib);
  while (same_body_count-- > 0)
    {
      tree alias_decl;
      int type;
      tree alias_decl, real_alias;
      struct cgraph_node *alias;

      decl_index = lto_input_uleb128 (ib);
      alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
      type = lto_input_uleb128 (ib);
      if (!type)
	{
	  tree real_alias;
	  decl_index = lto_input_uleb128 (ib);
	  real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
	  alias = cgraph_same_body_alias (node, alias_decl, real_alias);
	}
      else
	{
	  HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
	  HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
	  tree real_alias;
	  decl_index = lto_input_uleb128 (ib);
	  real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
	  alias = cgraph_add_thunk (node, alias_decl, fn_decl, type & 2, fixed_offset,
				    virtual_value,
				    (type & 4) ? size_int (virtual_value) : NULL_TREE,
				    real_alias);
	}
      decl_index = lto_input_uleb128 (ib);
      real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
      alias = cgraph_same_body_alias (node, alias_decl, real_alias);
      gcc_assert (alias);
      alias->resolution = (enum ld_plugin_symbol_resolution)lto_input_uleb128 (ib);
    }
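The thunk streaming above packs the two boolean thunk flags plus a "this record is a thunk" marker into a single uleb128 value: lto_output_node writes 1 + this_adjusting * 2 + virtual_offset_p * 4, and input_node recovers the flags with "& 2" and "& 4"; the low bit keeps the value nonzero, which is how a thunk record was told apart from the plain-alias case that streamed a literal 0. A small self-contained C check of that arithmetic follows; encode_thunk_flags is an invented helper name, not a GCC function.

#include <assert.h>
#include <stdbool.h>

/* Same encoding lto_output_node uses for the thunk flag byte.  */
static unsigned
encode_thunk_flags (bool this_adjusting, bool virtual_offset_p)
{
  return 1 + (this_adjusting != 0) * 2 + (virtual_offset_p != 0) * 4;
}

int main (void)
{
  unsigned type = encode_thunk_flags (true, false);

  assert (type != 0);           /* nonzero: this is a thunk, not a plain alias */
  assert ((type & 2) != 0);     /* this_adjusting was set */
  assert ((type & 4) == 0);     /* no virtual offset */
  return 0;
}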
gcc/lto-streamer-out.c

@@ -2196,7 +2196,8 @@ lto_output (cgraph_node_set set, varpool_node_set vset)
  for (i = 0; i < n_nodes; i++)
    {
      node = lto_cgraph_encoder_deref (encoder, i);
      if (lto_cgraph_encoder_encode_body_p (encoder, node))
      if (lto_cgraph_encoder_encode_body_p (encoder, node)
	  && !node->thunk.thunk_p)
	{
#ifdef ENABLE_CHECKING
	  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
gcc/lto/ChangeLog

@@ -1,3 +1,9 @@
2011-05-07  Jan Hubicka  <jh@suse.cz>

	* lto.c (lto_materialize_function): Use cgraph_function_with_gimple_body_p.
	(add_cgraph_node_to_partition): Do not re-add items to partition; handle thunks.
	(add_varpool_node_to_partition): Do not re-add items to partition.

2011-05-03  Jan Hubicka  <jh@suse.cz>

	* lto.c (free_ltrans_partitions): Fix accidental commit.
gcc/lto/lto.c

@@ -147,9 +147,9 @@ lto_materialize_function (struct cgraph_node *node)
  decl = node->decl;
  /* Read in functions with body (analyzed nodes)
     and also functions that are needed to produce virtual clones. */
  if (node->analyzed || has_analyzed_clone_p (node))
  if (cgraph_function_with_gimple_body_p (node) || has_analyzed_clone_p (node))
    {
      /* Clones don't need to be read. */
      /* Clones and thunks don't need to be read. */
      if (node->clone_of)
	return;

@@ -1197,6 +1197,12 @@ static void
add_cgraph_node_to_partition (ltrans_partition part, struct cgraph_node *node)
{
  struct cgraph_edge *e;
  cgraph_node_set_iterator csi;

  /* If NODE is already there, we have nothing to do. */
  csi = cgraph_node_set_find (part->cgraph_set, node);
  if (!csi_end_p (csi))
    return;

  part->insns += inline_summary (node)->self_size;

@@ -1211,6 +1217,13 @@ add_cgraph_node_to_partition (ltrans_partition part, struct cgraph_node *node)

  cgraph_node_set_add (part->cgraph_set, node);

  /* Thunks always must go along with function they reffer to. */
  if (node->thunk.thunk_p)
    add_cgraph_node_to_partition (part, node->callees->callee);
  for (e = node->callers; e; e = e->next_caller)
    if (e->caller->thunk.thunk_p)
      add_cgraph_node_to_partition (part, e->caller);

  for (e = node->callees; e; e = e->next_callee)
    if ((!e->inline_failed || DECL_COMDAT (e->callee->decl))
	&& !cgraph_node_in_set_p (e->callee, part->cgraph_set))

@@ -1228,6 +1241,13 @@ add_cgraph_node_to_partition (ltrans_partition part, struct cgraph_node *node)
static void
add_varpool_node_to_partition (ltrans_partition part, struct varpool_node *vnode)
{
  varpool_node_set_iterator vsi;

  /* If NODE is already there, we have nothing to do. */
  vsi = varpool_node_set_find (part->varpool_set, vnode);
  if (!vsi_end_p (vsi))
    return;

  varpool_node_set_add (part->varpool_set, vnode);

  if (vnode->aux)
gcc/passes.c (15)

@@ -1135,7 +1135,7 @@ do_per_function_toporder (void (*callback) (void *data), void *data)
      /* Allow possibly removed nodes to be garbage collected. */
      order[i] = NULL;
      node->process = 0;
      if (node->analyzed)
      if (cgraph_function_with_gimple_body_p (node))
	{
	  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
	  current_function_decl = node->decl;

@@ -1581,10 +1581,9 @@ execute_one_pass (struct opt_pass *pass)
  if (pass->type == IPA_PASS)
    {
      struct cgraph_node *node;
      for (node = cgraph_nodes; node; node = node->next)
	if (node->analyzed)
	  VEC_safe_push (ipa_opt_pass, heap, node->ipa_transforms_to_apply,
			 (struct ipa_opt_pass_d *)pass);
      FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
	VEC_safe_push (ipa_opt_pass, heap, node->ipa_transforms_to_apply,
		       (struct ipa_opt_pass_d *)pass);
    }

  if (!current_function_decl)

@@ -1705,7 +1704,7 @@ ipa_write_summaries (void)
    {
      struct cgraph_node *node = order[i];

      if (node->analyzed)
      if (cgraph_function_with_gimple_body_p (node))
	{
	  /* When streaming out references to statements as part of some IPA
	     pass summary, the statements need to have uids assigned and the

@@ -1718,7 +1717,7 @@ ipa_write_summaries (void)
	  pop_cfun ();
	}
      if (node->analyzed)
	cgraph_node_set_add (set, node);
      cgraph_node_set_add (set, node);
    }
  vset = varpool_node_set_new ();

@@ -2036,7 +2035,7 @@ function_called_by_processed_nodes_p (void)
    {
      if (e->caller->decl == current_function_decl)
	continue;
      if (!e->caller->analyzed)
      if (!cgraph_function_with_gimple_body_p (e->caller))
	continue;
      if (TREE_ASM_WRITTEN (e->caller->decl))
	continue;