* ipa-utils.c (postorder_stack): New structure.
	(ipa_reverse_postorder): Handle aliases.

From-SVN: r174991
This commit is contained in:
Jan Hubicka 2011-06-13 17:31:22 +02:00 committed by Jan Hubicka
parent db1d217c76
commit 8775a18b96
2 changed files with 61 additions and 37 deletions

View File

@@ -1,3 +1,15 @@
2011-06-13 Jan Hubicka <jh@suse.cz>
* ipa-utils.c (postorder_stack): New structure.
(ipa_reverse_postorder): Handle aliases.
2011-06-13 Jan Hubicka <jh@suse.cz>
* ipa-inline.c (reset_edge_caches): Walk aliases.
(update_caller_keys): Do not test inlinability of aliases.
* ipa-inline-analysis.c (do_estimate_edge_time): Look through alias.
(do_estimate_growth): Fix typo.
2011-06-13 Jan Hubicka <jh@suse.cz>
* ipa-inline-transform.c (can_remove_node_now_p_1): Break out from...

View File

@@ -233,6 +233,13 @@ ipa_free_postorder_info (void)
}
}
/* One frame of the explicit DFS stack used by ipa_reverse_postorder,
   replacing the old recursion-via-aux scheme so that alias references
   can be walked alongside caller edges.  */
struct postorder_stack
{
/* Call-graph node this frame is visiting.  */
struct cgraph_node *node;
/* Next caller edge of NODE still to be walked (starts at
   node->callers, advanced via edge->next_caller).  */
struct cgraph_edge *edge;
/* Index of the next referring IPA reference of NODE to scan for
   IPA_REF_ALIAS entries.  */
int ref;
};
/* Fill array order with all nodes with output flag set in the reverse
topological order. Return the number of elements in the array.
FIXME: While walking, consider aliases, too. */
@@ -243,11 +250,12 @@ ipa_reverse_postorder (struct cgraph_node **order)
struct cgraph_node *node, *node2;
int stack_size = 0;
int order_pos = 0;
struct cgraph_edge *edge, last;
struct cgraph_edge *edge;
int pass;
struct ipa_ref *ref;
struct cgraph_node **stack =
XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
struct postorder_stack *stack =
XCNEWVEC (struct postorder_stack, cgraph_n_nodes);
/* We have to deal with cycles nicely, so use a depth first traversal
output algorithm. Ignore the fact that some functions won't need
@@ -261,47 +269,51 @@ ipa_reverse_postorder (struct cgraph_node **order)
&& (pass
|| (!node->address_taken
&& !node->global.inlined_to
&& !cgraph_only_called_directly_or_aliased_p (node))))
&& !node->alias && !node->thunk.thunk_p
&& !cgraph_only_called_directly_p (node))))
{
node2 = node;
if (!node->callers)
node->aux = &last;
else
node->aux = node->callers;
while (node2)
stack_size = 0;
stack[stack_size].node = node;
stack[stack_size].edge = node->callers;
stack[stack_size].ref = 0;
node->aux = (void *)(size_t)1;
while (stack_size >= 0)
{
while (node2->aux != &last)
while (true)
{
edge = (struct cgraph_edge *) node2->aux;
if (edge->next_caller)
node2->aux = edge->next_caller;
else
node2->aux = &last;
/* Break possible cycles involving always-inline
functions by ignoring edges from always-inline
functions to non-always-inline functions. */
if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
&& !DECL_DISREGARD_INLINE_LIMITS (edge->callee->decl))
continue;
if (!edge->caller->aux)
node2 = NULL;
while (stack[stack_size].edge && !node2)
{
if (!edge->caller->callers)
edge->caller->aux = &last;
else
edge->caller->aux = edge->caller->callers;
stack[stack_size++] = node2;
edge = stack[stack_size].edge;
node2 = edge->caller;
break;
stack[stack_size].edge = edge->next_caller;
/* Break possible cycles involving always-inline
functions by ignoring edges from always-inline
functions to non-always-inline functions. */
if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
&& !DECL_DISREGARD_INLINE_LIMITS
(cgraph_function_node (edge->callee, NULL)->decl))
node2 = NULL;
}
for (;ipa_ref_list_refering_iterate (&stack[stack_size].node->ref_list,
stack[stack_size].ref,
ref) && !node2;
stack[stack_size].ref++)
{
if (ref->use == IPA_REF_ALIAS)
node2 = ipa_ref_refering_node (ref);
}
if (!node2)
break;
if (!node2->aux)
{
stack[++stack_size].node = node2;
stack[stack_size].edge = node2->callers;
stack[stack_size].ref = 0;
node2->aux = (void *)(size_t)1;
}
}
if (node2->aux == &last)
{
order[order_pos++] = node2;
if (stack_size)
node2 = stack[--stack_size];
else
node2 = NULL;
}
order[order_pos++] = stack[stack_size--].node;
}
}
free (stack);