Implement new immediate use iterators.
2006-04-27  Andrew MacLeod  <amacleod@redhat.com>

        PR tree-optimization/26854
        * tree-vrp.c (remove_range_assertions): Use new Immuse iterator.
        * doc/tree-ssa.texi: Update immuse iterator documentation.
        * tree-ssa-math-opts.c (execute_cse_reciprocals_1): Use new iterator.
        * tree-ssa-dom.c (propagate_rhs_into_lhs): Use new iterator.
        * tree-flow-inline.h (end_safe_imm_use_traverse, end_safe_imm_use_p,
        first_safe_imm_use, next_safe_imm_use): Remove.
        (end_imm_use_stmt_p): New.  Check for end of immuse stmt traversal.
        (end_imm_use_stmt_traverse): New.  Terminate immuse stmt traversal.
        (move_use_after_head): New.  Helper function to sort immuses in a stmt.
        (link_use_stmts_after): New.  Link all immuses in a stmt consecutively.
        (first_imm_use_stmt): New.  Get first stmt in an immuse list.
        (next_imm_use_stmt): New.  Get next stmt in an immuse list.
        (first_imm_use_on_stmt): New.  Get first immuse on a stmt.
        (end_imm_use_on_stmt_p): New.  Check for end of immuses on a stmt.
        (next_imm_use_on_stmt): New.  Move to next immuse on a stmt.
        * tree-ssa-forwprop.c (forward_propagate_addr_expr): Use new iterator.
        * lambda-code.c (lambda_loopnest_to_gcc_loopnest): Use new iterator.
        (perfect_nestify): Use new iterator.
        * tree-vect-transform.c (vect_create_epilog_for_reduction): Use new
        iterator.
        * tree-flow.h (struct immediate_use_iterator_d): Add comments.
        (next_imm_name): New field in struct immediate_use_iterator_d.
        (FOR_EACH_IMM_USE_SAFE, BREAK_FROM_SAFE_IMM_USE): Remove.
        (FOR_EACH_IMM_USE_STMT, BREAK_FROM_IMM_USE_STMT,
        FOR_EACH_IMM_USE_ON_STMT): New immediate use iterator macros.
        * tree-cfg.c (replace_uses_by): Use new iterator.
        * tree-ssa-threadedge.c (lhs_of_dominating_assert): Use new iterator.
        * tree-ssa-operands.c (correct_use_link): Remove.
        (finalize_ssa_use_ops): No longer call correct_use_link.

From-SVN: r113321
parent 8fcd79cbcd
commit 6c00f606c1
gcc/ChangeLog
@@ -1,3 +1,36 @@
+2006-04-27  Andrew MacLeod  <amacleod@redhat.com>
+
+        PR tree-optimization/26854
+        * tree-vrp.c (remove_range_assertions): Use new Immuse iterator.
+        * doc/tree-ssa.texi: Update immuse iterator documentation.
+        * tree-ssa-math-opts.c (execute_cse_reciprocals_1): Use new iterator.
+        * tree-ssa-dom.c (propagate_rhs_into_lhs): Use new iterator.
+        * tree-flow-inline.h (end_safe_imm_use_traverse, end_safe_imm_use_p,
+        first_safe_imm_use, next_safe_imm_use): Remove.
+        (end_imm_use_stmt_p): New.  Check for end of immuse stmt traversal.
+        (end_imm_use_stmt_traverse): New.  Terminate immuse stmt traversal.
+        (move_use_after_head): New.  Helper function to sort immuses in a stmt.
+        (link_use_stmts_after): New.  Link all immuses in a stmt consecutively.
+        (first_imm_use_stmt): New.  Get first stmt in an immuse list.
+        (next_imm_use_stmt): New.  Get next stmt in an immuse list.
+        (first_imm_use_on_stmt): New.  Get first immuse on a stmt.
+        (end_imm_use_on_stmt_p): New.  Check for end of immuses on a stmt.
+        (next_imm_use_on_stmt): New.  Move to next immuse on a stmt.
+        * tree-ssa-forwprop.c (forward_propagate_addr_expr): Use new iterator.
+        * lambda-code.c (lambda_loopnest_to_gcc_loopnest): Use new iterator.
+        (perfect_nestify): Use new iterator.
+        * tree-vect-transform.c (vect_create_epilog_for_reduction): Use new
+        iterator.
+        * tree-flow.h (struct immediate_use_iterator_d): Add comments.
+        (next_imm_name): New field in struct immediate_use_iterator_d.
+        (FOR_EACH_IMM_USE_SAFE, BREAK_FROM_SAFE_IMM_USE): Remove.
+        (FOR_EACH_IMM_USE_STMT, BREAK_FROM_IMM_USE_STMT,
+        FOR_EACH_IMM_USE_ON_STMT): New immediate use iterator macros.
+        * tree-cfg.c (replace_uses_by): Use new iterator.
+        * tree-ssa-threadedge.c (lhs_of_dominating_assert): Use new iterator.
+        * tree-ssa-operands.c (correct_use_link): Remove.
+        (finalize_ssa_use_ops): No longer call correct_use_link.
+
 2006-04-27  Stuart Hastings  <stuart@apple.com>
 
         * config/rs6000/t-darwin (DARWIN_EXTRA_CRT_BUILD_CFLAGS): New.
gcc/doc/tree-ssa.texi
@@ -1092,15 +1092,21 @@ FOR_EACH_PHI_OR_STMT_DEF (def_operand_p, phi, iter, flags)
 
 Immediate use information is now always available.  Using the immediate use
 iterators, you may examine every use of any @code{SSA_NAME}. For instance,
-to change each use of @code{ssa_var} to @code{ssa_var2}:
+to change each use of @code{ssa_var} to @code{ssa_var2} and call fold_stmt on
+each stmt after that is done:
 
 @smallexample
   use_operand_p imm_use_p;
   imm_use_iterator iterator;
-  tree ssa_var
+  tree ssa_var, stmt;
 
-  FOR_EACH_IMM_USE_SAFE (imm_use_p, iterator, ssa_var)
-    SET_USE (imm_use_p, ssa_var_2);
+  FOR_EACH_IMM_USE_STMT (stmt, iterator, ssa_var)
+    @{
+      FOR_EACH_IMM_USE_ON_STMT (imm_use_p, iterator)
+        SET_USE (imm_use_p, ssa_var_2);
+      fold_stmt (stmt);
+    @}
 @end smallexample
 
 There are 2 iterators which can be used. @code{FOR_EACH_IMM_USE_FAST} is used
@@ -1108,21 +1114,26 @@ when the immediate uses are not changed, ie. you are looking at the uses, but
 not setting them.
 
 If they do get changed, then care must be taken that things are not changed
-under the iterators, so use the @code{FOR_EACH_IMM_USE_SAFE} iterator.  It
-attempts to preserve the sanity of the use list by moving an iterator element
-through the use list, preventing insertions and deletions in the list from
-resulting in invalid pointers.  This is a little slower since it adds a
-placeholder element and moves it through the list.  This element must be
-also be removed if the loop is terminated early.  A macro
-(@code{BREAK_FROM SAFE_IMM_USE}) is provided for this:
+under the iterators, so use the @code{FOR_EACH_IMM_USE_STMT} and
+@code{FOR_EACH_IMM_USE_ON_STMT} iterators.  They attempt to preserve the
+sanity of the use list by moving all the uses for a statement into
+a controlled position, and then iterating over those uses.  Then the
+optimization can manipulate the stmt when all the uses have been
+processed.  This is a little slower than the FAST version since it adds a
+placeholder element and must sort through the list a bit for each statement.
+This placeholder element must also be removed if the loop is
+terminated early.  The macro @code{BREAK_FROM_IMM_USE_STMT} is provided
+to do this:
 
 @smallexample
-  FOR_EACH_IMM_USE_SAFE (use_p, iter, var)
+  FOR_EACH_IMM_USE_STMT (stmt, iterator, ssa_var)
     @{
-      if (var == last_var)
+      if (stmt == last_stmt)
        BREAK_FROM_SAFE_IMM_USE (iter);
-      else
-        SET_USE (use_p, var2);
+
+      FOR_EACH_IMM_USE_ON_STMT (imm_use_p, iterator)
+        SET_USE (imm_use_p, ssa_var_2);
+      fold_stmt (stmt);
     @}
 @end smallexample
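In practice, the pattern documented above looks like the following inside a
pass (a minimal sketch written against the interface added by this patch, not
code taken from the commit; old_name, new_val and avoid_stmt are hypothetical
locals):

  /* Sketch only: replace every use of OLD_NAME with NEW_VAL, skipping one
     statement, using the new statement-at-a-time iterators.  */
  static void
  replace_uses_sketch (tree old_name, tree new_val, tree avoid_stmt)
  {
    imm_use_iterator iter;
    use_operand_p use_p;
    tree stmt;

    FOR_EACH_IMM_USE_STMT (stmt, iter, old_name)
      {
        /* Leaving the loop early must unlink the marker node, so use the
           dedicated macro rather than a bare `break'.  */
        if (stmt == avoid_stmt)
          BREAK_FROM_IMM_USE_STMT (iter);

        /* Every occurrence of OLD_NAME on this statement is visited before
           the statement itself is touched.  */
        FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
          propagate_value (use_p, new_val);

        update_stmt (stmt);
      }
  }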
gcc/lambda-code.c
@@ -1923,9 +1923,10 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
   for (i = 0; VEC_iterate (tree, old_ivs, i, oldiv); i++)
     {
       imm_use_iterator imm_iter;
-      use_operand_p imm_use;
+      use_operand_p use_p;
       tree oldiv_def;
       tree oldiv_stmt = SSA_NAME_DEF_STMT (oldiv);
+      tree stmt;
 
       if (TREE_CODE (oldiv_stmt) == PHI_NODE)
         oldiv_def = PHI_RESULT (oldiv_stmt);
@@ -1933,37 +1934,31 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
         oldiv_def = SINGLE_SSA_TREE_OPERAND (oldiv_stmt, SSA_OP_DEF);
       gcc_assert (oldiv_def != NULL_TREE);
 
-      FOR_EACH_IMM_USE_SAFE (imm_use, imm_iter, oldiv_def)
-        {
-          tree stmt = USE_STMT (imm_use);
-          use_operand_p use_p;
-          ssa_op_iter iter;
-          gcc_assert (TREE_CODE (stmt) != PHI_NODE);
-          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
-            {
-              if (USE_FROM_PTR (use_p) == oldiv)
-                {
-                  tree newiv, stmts;
-                  lambda_body_vector lbv, newlbv;
-                  /* Compute the new expression for the induction
-                     variable.  */
-                  depth = VEC_length (tree, new_ivs);
-                  lbv = lambda_body_vector_new (depth);
-                  LBV_COEFFICIENTS (lbv)[i] = 1;
-
-                  newlbv = lambda_body_vector_compute_new (transform, lbv);
+      FOR_EACH_IMM_USE_STMT (stmt, imm_iter, oldiv_def)
+        {
+          tree newiv, stmts;
+          lambda_body_vector lbv, newlbv;
 
-                  newiv = lbv_to_gcc_expression (newlbv, TREE_TYPE (oldiv),
-                                                 new_ivs, &stmts);
-                  bsi = bsi_for_stmt (stmt);
-                  /* Insert the statements to build that
-                     expression.  */
-                  bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);
-                  propagate_value (use_p, newiv);
-                  update_stmt (stmt);
-
-                }
-            }
+          gcc_assert (TREE_CODE (stmt) != PHI_NODE);
+
+          /* Compute the new expression for the induction
+             variable.  */
+          depth = VEC_length (tree, new_ivs);
+          lbv = lambda_body_vector_new (depth);
+          LBV_COEFFICIENTS (lbv)[i] = 1;
+
+          newlbv = lambda_body_vector_compute_new (transform, lbv);
+
+          newiv = lbv_to_gcc_expression (newlbv, TREE_TYPE (oldiv),
+                                         new_ivs, &stmts);
+          bsi = bsi_for_stmt (stmt);
+          /* Insert the statements to build that
+             expression.  */
+          bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);
+
+          FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
+            propagate_value (use_p, newiv);
+          update_stmt (stmt);
         }
     }
   VEC_free (tree, heap, new_ivs);
@@ -2482,6 +2477,7 @@ perfect_nestify (struct loops *loops,
         {
           use_operand_p use_p;
           imm_use_iterator imm_iter;
+          tree imm_stmt;
           tree stmt = bsi_stmt (bsi);
 
           if (stmt == exit_condition
@@ -2495,10 +2491,9 @@ perfect_nestify (struct loops *loops,
 
           /* Make copies of this statement to put it back next
              to its uses.  */
-          FOR_EACH_IMM_USE_SAFE (use_p, imm_iter,
+          FOR_EACH_IMM_USE_STMT (imm_stmt, imm_iter,
                                  TREE_OPERAND (stmt, 0))
             {
-              tree imm_stmt = USE_STMT (use_p);
               if (!exit_phi_for_loop_p (loop->inner, imm_stmt))
                 {
                   block_stmt_iterator tobsi;
@@ -2511,7 +2506,8 @@ perfect_nestify (struct loops *loops,
                   newname = SSA_NAME_VAR (newname);
                   newname = make_ssa_name (newname, newstmt);
                   TREE_OPERAND (newstmt, 0) = newname;
-                  SET_USE (use_p, TREE_OPERAND (newstmt, 0));
+                  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
+                    SET_USE (use_p, newname);
                   bsi_insert_before (&tobsi, newstmt, BSI_SAME_STMT);
                   update_stmt (newstmt);
                   update_stmt (imm_stmt);
gcc/tree-cfg.c
@@ -1259,51 +1259,42 @@ replace_uses_by (tree name, tree val)
   tree stmt;
   edge e;
   unsigned i;
-  VEC(tree,heap) *stmts = VEC_alloc (tree, heap, 20);
 
-  FOR_EACH_IMM_USE_SAFE (use, imm_iter, name)
+  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
     {
-      stmt = USE_STMT (use);
-      replace_exp (use, val);
-
-      if (TREE_CODE (stmt) == PHI_NODE)
-        {
-          e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
-          if (e->flags & EDGE_ABNORMAL)
-            {
-              /* This can only occur for virtual operands, since
-                 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
-                 would prevent replacement.  */
-              gcc_assert (!is_gimple_reg (name));
-              SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
-            }
-        }
-      else
-        VEC_safe_push (tree, heap, stmts, stmt);
-    }
-
-  /* We do not update the statements in the loop above.  Consider
-     x = w * w;
-
-     If we performed the update in the first loop, the statement
-     would be rescanned after first occurrence of w is replaced,
-     the new uses would be placed to the beginning of the list,
-     and we would never process them.  */
-  for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
-    {
-      tree rhs;
-
-      fold_stmt_inplace (stmt);
-
-      rhs = get_rhs (stmt);
-      if (TREE_CODE (rhs) == ADDR_EXPR)
-        recompute_tree_invariant_for_addr_expr (rhs);
-
-      maybe_clean_or_replace_eh_stmt (stmt, stmt);
-      mark_new_vars_to_rename (stmt);
-    }
-
-  VEC_free (tree, heap, stmts);
+      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
+        {
+          replace_exp (use, val);
+
+          if (TREE_CODE (stmt) == PHI_NODE)
+            {
+              e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
+              if (e->flags & EDGE_ABNORMAL)
+                {
+                  /* This can only occur for virtual operands, since
+                     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
+                     would prevent replacement.  */
+                  gcc_assert (!is_gimple_reg (name));
+                  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
+                }
+            }
+        }
+
+      if (TREE_CODE (stmt) != PHI_NODE)
+        {
+          tree rhs;
+
+          fold_stmt_inplace (stmt);
+          rhs = get_rhs (stmt);
+          if (TREE_CODE (rhs) == ADDR_EXPR)
+            recompute_tree_invariant_for_addr_expr (rhs);
+
+          maybe_clean_or_replace_eh_stmt (stmt, stmt);
+          mark_new_vars_to_rename (stmt);
+        }
+    }
+
   gcc_assert (num_imm_uses (name) == 0);
 
   /* Also update the trees stored in loop structures.  */
   if (current_loops)
gcc/tree-flow-inline.h
@@ -394,83 +394,6 @@ relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old, tree s
   linknode->stmt = stmt;
 }
 
-/* Finished the traverse of an immediate use list IMM by removing it from
-   the list.  */
-static inline void
-end_safe_imm_use_traverse (imm_use_iterator *imm)
-{
-  delink_imm_use (&(imm->iter_node));
-}
-
-/* Return true if IMM is at the end of the list.  */
-static inline bool
-end_safe_imm_use_p (imm_use_iterator *imm)
-{
-  return (imm->imm_use == imm->end_p);
-}
-
-/* Initialize iterator IMM to process the list for VAR.  */
-static inline use_operand_p
-first_safe_imm_use (imm_use_iterator *imm, tree var)
-{
-  /* Set up and link the iterator node into the linked list for VAR.  */
-  imm->iter_node.use = NULL;
-  imm->iter_node.stmt = NULL_TREE;
-  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
-  /* Check if there are 0 elements.  */
-  if (imm->end_p->next == imm->end_p)
-    {
-      imm->imm_use = imm->end_p;
-      return NULL_USE_OPERAND_P;
-    }
-
-  link_imm_use (&(imm->iter_node), var);
-  imm->imm_use = imm->iter_node.next;
-  return imm->imm_use;
-}
-
-/* Bump IMM to the next use in the list.  */
-static inline use_operand_p
-next_safe_imm_use (imm_use_iterator *imm)
-{
-  ssa_use_operand_t *ptr;
-  use_operand_p old;
-
-  old = imm->imm_use;
-  /* If the next node following the iter_node is still the one referred to by
-     imm_use, then the list hasn't changed, go to the next node.  */
-  if (imm->iter_node.next == imm->imm_use)
-    {
-      ptr = &(imm->iter_node);
-      /* Remove iternode from the list.  */
-      delink_imm_use (ptr);
-      imm->imm_use = imm->imm_use->next;
-      if (! end_safe_imm_use_p (imm))
-        {
-          /* This isn't the end, link iternode before the next use.  */
-          ptr->prev = imm->imm_use->prev;
-          ptr->next = imm->imm_use;
-          imm->imm_use->prev->next = ptr;
-          imm->imm_use->prev = ptr;
-        }
-      else
-        return old;
-    }
-  else
-    {
-      /* If the 'next' value after the iterator isn't the same as it was, then
-         a node has been deleted, so we simply proceed to the node following
-         where the iterator is in the list.  */
-      imm->imm_use = imm->iter_node.next;
-      if (end_safe_imm_use_p (imm))
-        {
-          end_safe_imm_use_traverse (imm);
-          return old;
-        }
-    }
-
-  return imm->imm_use;
-}
-
 /* Return true is IMM has reached the end of the immediate use list.  */
 static inline bool
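The functions deleted above implemented the old "safe" scheme: a single
placeholder node walked through the use list one element at a time.  Its
weakness, spelled out in the comments removed from tree-cfg.c and
tree-ssa-dom.c elsewhere in this patch, is that rewriting a use and then
re-scanning its statement links that statement's fresh use nodes at the head
of the list, behind the placeholder.  A hypothetical caller written against
the removed API makes the hazard concrete (illustration only, not code from
the tree):

  /* Sketch of the pre-patch idiom.  If NAME occurs twice in one statement,
     e.g.  x_3 = w_1 * w_1;  then after the first propagate_value/update_stmt
     the statement's re-scanned uses are placed at the beginning of NAME's
     immediate-use list, before the iterator's placeholder, and the loop can
     finish without ever visiting the second use.  */
  static void
  old_style_propagate (tree name, tree val)
  {
    imm_use_iterator iter;
    use_operand_p use_p;

    FOR_EACH_IMM_USE_SAFE (use_p, iter, name)   /* removed above */
      {
        tree stmt = USE_STMT (use_p);

        propagate_value (use_p, val);
        update_stmt (stmt);                     /* re-links STMT's uses */
      }
  }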
gcc/tree-flow-inline.h
@@ -1376,7 +1299,162 @@ op_iter_init_phidef (ssa_op_iter *ptr, tree phi, int flags)
   return PHI_RESULT_PTR (phi);
 }
 
+/* Return true if IMM has reached the end of the immediate use stmt list.  */
+
+static inline bool
+end_imm_use_stmt_p (imm_use_iterator *imm)
+{
+  return (imm->imm_use == imm->end_p);
+}
+
+/* Finish the traversal of an immediate use stmt list IMM by removing the
+   placeholder node from the list.  */
+
+static inline void
+end_imm_use_stmt_traverse (imm_use_iterator *imm)
+{
+  delink_imm_use (&(imm->iter_node));
+}
+
+/* Immediate use traversal of uses within a stmt requires that all the
+   uses on a stmt be sequentially listed.  This routine is used to build up
+   this sequential list by adding USE_P to the end of the current list
+   currently delimited by HEAD and LAST_P.  The new LAST_P value is
+   returned.  */
+
+static inline use_operand_p
+move_use_after_head (use_operand_p use_p, use_operand_p head,
+                     use_operand_p last_p)
+{
+  gcc_assert (USE_FROM_PTR (use_p) == USE_FROM_PTR (head));
+  /* Skip head when we find it.  */
+  if (use_p != head)
+    {
+      /* If use_p is already linked in after last_p, continue.  */
+      if (last_p->next == use_p)
+        last_p = use_p;
+      else
+        {
+          /* Delink from current location, and link in at last_p.  */
+          delink_imm_use (use_p);
+          link_imm_use_to_list (use_p, last_p);
+          last_p = use_p;
+        }
+    }
+  return last_p;
+}
+
+
+/* This routine will relink all uses with the same stmt as HEAD into the list
+   immediately following HEAD for iterator IMM.  */
+
+static inline void
+link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
+{
+  use_operand_p use_p;
+  use_operand_p last_p = head;
+  tree head_stmt = USE_STMT (head);
+  tree use = USE_FROM_PTR (head);
+  ssa_op_iter op_iter;
+  int flag;
+
+  /* Only look at virtual or real uses, depending on the type of HEAD.  */
+  flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
+
+  if (TREE_CODE (head_stmt) == PHI_NODE)
+    {
+      FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
+        if (USE_FROM_PTR (use_p) == use)
+          last_p = move_use_after_head (use_p, head, last_p);
+    }
+  else
+    {
+      FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
+        if (USE_FROM_PTR (use_p) == use)
+          last_p = move_use_after_head (use_p, head, last_p);
+    }
+  /* Link iter node in after last_p.  */
+  if (imm->iter_node.prev != NULL)
+    delink_imm_use (&imm->iter_node);
+  link_imm_use_to_list (&(imm->iter_node), last_p);
+}
+
+/* Initialize IMM to traverse over uses of VAR.  Return the first statement.  */
+static inline tree
+first_imm_use_stmt (imm_use_iterator *imm, tree var)
+{
+  gcc_assert (TREE_CODE (var) == SSA_NAME);
+
+  imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
+  imm->imm_use = imm->end_p->next;
+  imm->next_imm_name = NULL_USE_OPERAND_P;
+
+  /* iter_node is used as a marker within the immediate use list to indicate
+     where the end of the current stmt's uses are.  Initialize it to a NULL
+     stmt and use, which indicates a marker node.  */
+  imm->iter_node.prev = NULL_USE_OPERAND_P;
+  imm->iter_node.next = NULL_USE_OPERAND_P;
+  imm->iter_node.stmt = NULL_TREE;
+  imm->iter_node.use = NULL_USE_OPERAND_P;
+
+  if (end_imm_use_stmt_p (imm))
+    return NULL_TREE;
+
+  link_use_stmts_after (imm->imm_use, imm);
+
+  return USE_STMT (imm->imm_use);
+}
+
+/* Bump IMM to the next stmt which has a use of var.  */
+
+static inline tree
+next_imm_use_stmt (imm_use_iterator *imm)
+{
+  imm->imm_use = imm->iter_node.next;
+  if (end_imm_use_stmt_p (imm))
+    {
+      if (imm->iter_node.prev != NULL)
+        delink_imm_use (&imm->iter_node);
+      return NULL_TREE;
+    }
+
+  link_use_stmts_after (imm->imm_use, imm);
+  return USE_STMT (imm->imm_use);
+}
+
+/* This routine will return the first use on the stmt IMM currently refers
+   to.  */
+
+static inline use_operand_p
+first_imm_use_on_stmt (imm_use_iterator *imm)
+{
+  imm->next_imm_name = imm->imm_use->next;
+  return imm->imm_use;
+}
+
+/* Return TRUE if the last use on the stmt IMM refers to has been visited.  */
+
+static inline bool
+end_imm_use_on_stmt_p (imm_use_iterator *imm)
+{
+  return (imm->imm_use == &(imm->iter_node));
+}
+
+/* Bump to the next use on the stmt IMM refers to, return NULL if done.  */
+
+static inline use_operand_p
+next_imm_use_on_stmt (imm_use_iterator *imm)
+{
+  imm->imm_use = imm->next_imm_name;
+  if (end_imm_use_on_stmt_p (imm))
+    return NULL_USE_OPERAND_P;
+  else
+    {
+      imm->next_imm_name = imm->imm_use->next;
+      return imm->imm_use;
+    }
+}
+
 /* Return true if VAR cannot be modified by the program.  */
 
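The new scheme above works by temporarily sorting the per-SSA-name use list
so that all uses belonging to the statement currently being visited are
contiguous, terminated by the iterator's own iter_node marker.  Roughly (an
illustrative picture, not text from the patch):

  /* Immediate-use list of VAR after first_imm_use_stmt/link_use_stmts_after,
     assuming statement S1 uses VAR twice and statement S2 uses it once:

       end_p -> use#1 (S1) -> use#2 (S1) -> iter_node -> use#3 (S2) -> end_p

     FOR_EACH_IMM_USE_ON_STMT walks use#1 and use#2 and stops at iter_node;
     next_imm_use_stmt then restarts from iter_node->next (use#3), regroups
     S2's uses behind it and moves the marker again.  next_imm_name caches
     the following node, so the use currently being processed may be
     replaced or relinked without invalidating the walk.  */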
gcc/tree-flow.h
@@ -225,9 +225,15 @@ struct function_ann_d GTY(())
 
 typedef struct immediate_use_iterator_d
 {
+  /* This is the current use the iterator is processing.  */
   ssa_use_operand_t *imm_use;
+  /* This marks the last use in the list (use node from SSA_NAME).  */
   ssa_use_operand_t *end_p;
+  /* This node is inserted and used to mark the end of the uses for a stmt.  */
   ssa_use_operand_t iter_node;
+  /* This is the next ssa_name to visit.  IMM_USE may get removed before
+     the next one is traversed to, so it must be cached early.  */
+  ssa_use_operand_t *next_imm_name;
 } imm_use_iterator;
 
 
@@ -239,18 +245,43 @@ typedef struct immediate_use_iterator_d
       !end_readonly_imm_use_p (&(ITER)); \
       (DEST) = next_readonly_imm_use (&(ITER)))
 
-#define FOR_EACH_IMM_USE_SAFE(DEST, ITER, SSAVAR) \
-  for ((DEST) = first_safe_imm_use (&(ITER), (SSAVAR)); \
-       !end_safe_imm_use_p (&(ITER)); \
-       (DEST) = next_safe_imm_use (&(ITER)))
+/* Use this iterator to visit each stmt which has a use of SSAVAR.  */
+
+#define FOR_EACH_IMM_USE_STMT(STMT, ITER, SSAVAR) \
+  for ((STMT) = first_imm_use_stmt (&(ITER), (SSAVAR)); \
+       !end_imm_use_stmt_p (&(ITER)); \
+       (STMT) = next_imm_use_stmt (&(ITER)))
 
-#define BREAK_FROM_SAFE_IMM_USE(ITER) \
+/* Use this to terminate the FOR_EACH_IMM_USE_STMT loop early.  Failure to
+   do so will result in leaving an iterator marker node in the immediate
+   use list, and nothing good will come from that.  */
+#define BREAK_FROM_IMM_USE_STMT(ITER) \
    { \
-     end_safe_imm_use_traverse (&(ITER)); \
+     end_imm_use_stmt_traverse (&(ITER)); \
      break; \
    }
 
 
+/* Use this iterator in combination with FOR_EACH_IMM_USE_STMT to
+   get access to each occurrence of ssavar on the stmt returned by
+   that iterator.  For instance:
+
+     FOR_EACH_IMM_USE_STMT (stmt, iter, var)
+       {
+         FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+           {
+             SET_USE (use_p, blah);
+           }
+         update_stmt (stmt);
+       }  */
+
+#define FOR_EACH_IMM_USE_ON_STMT(DEST, ITER) \
+  for ((DEST) = first_imm_use_on_stmt (&(ITER)); \
+       !end_imm_use_on_stmt_p (&(ITER)); \
+       (DEST) = next_imm_use_on_stmt (&(ITER)))
+
 
 
 struct stmt_ann_d GTY(())
 {
   struct tree_ann_common_d common;
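For reference, the nested macros above expand into an ordinary pair of for
loops over the inline functions from tree-flow-inline.h (hand expansion shown
for illustration; the loop body comment is hypothetical):

  for (stmt = first_imm_use_stmt (&iter, var);
       !end_imm_use_stmt_p (&iter);
       stmt = next_imm_use_stmt (&iter))
    {
      for (use_p = first_imm_use_on_stmt (&iter);
           !end_imm_use_on_stmt_p (&iter);
           use_p = next_imm_use_on_stmt (&iter))
        {
          /* ... process one use of VAR appearing in STMT ... */
        }

      /* A bare `break' out of the outer loop would strand iter_node in the
         use list; BREAK_FROM_IMM_USE_STMT calls end_imm_use_stmt_traverse
         to unlink it before breaking.  */
    }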
gcc/tree-ssa-dom.c
@@ -2118,6 +2118,7 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
 {
   use_operand_p use_p;
   imm_use_iterator iter;
+  tree use_stmt;
   bool all = true;
 
   /* Dump details.  */
@@ -2134,10 +2135,8 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
   /* Walk over every use of LHS and try to replace the use with RHS.
      At this point the only reason why such a propagation would not
      be successful would be if the use occurs in an ASM_EXPR.  */
-repeat:
-  FOR_EACH_IMM_USE_SAFE (use_p, iter, lhs)
+  FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
     {
-      tree use_stmt = USE_STMT (use_p);
 
       /* It's not always safe to propagate into an ASM_EXPR.  */
       if (TREE_CODE (use_stmt) == ASM_EXPR
@@ -2156,7 +2155,8 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
         }
 
       /* Propagate the RHS into this use of the LHS.  */
-      propagate_value (use_p, rhs);
+      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+        propagate_value (use_p, rhs);
 
       /* Special cases to avoid useless calls into the folding
          routines, operand scanning, etc.
@@ -2303,23 +2303,8 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
         }
     }
 
-  /* Due to a bug in the immediate use iterator code, we can
-     miss visiting uses in some cases when there is more than
-     one use in a statement.  Missing a use can cause a multitude
-     of problems if we expected to eliminate all uses and remove
-     the defining statement.
-
-     Until Andrew can fix the iterator, this hack will detect
-     the cases which cause us problems.  Namely if ALL is set
-     and we still have some immediate uses, then we must have
-     skipped one or more in the loop above.  So just re-execute
-     the loop.
-
-     The maximum number of times we can re-execute the loop is
-     bounded by the maximum number of times a given SSA_NAME
-     appears in a single statement.  */
-  if (all && !has_zero_uses (lhs))
-    goto repeat;
+  /* Ensure there is nothing else to do.  */
+  gcc_assert (all && has_zero_uses (lhs));
 
   /* If we were able to propagate away all uses of LHS, then
      we can remove STMT.  */
gcc/tree-ssa-forwprop.c
@@ -805,14 +805,13 @@ forward_propagate_addr_expr (tree stmt, bool *some)
 {
   int stmt_loop_depth = bb_for_stmt (stmt)->loop_depth;
   tree name = TREE_OPERAND (stmt, 0);
-  use_operand_p imm_use;
   imm_use_iterator iter;
+  tree use_stmt;
   bool all = true;
 
-  FOR_EACH_IMM_USE_SAFE (imm_use, iter, name)
+  FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
     {
       bool result;
-      tree use_stmt = USE_STMT (imm_use);
 
       /* If the use is not in a simple assignment statement, then
          there is nothing we can do.  */
gcc/tree-ssa-math-opts.c
@@ -446,17 +446,20 @@ execute_cse_reciprocals_1 (block_stmt_iterator *def_bsi, tree def)
   threshold = targetm.min_divisions_for_recip_mul (TYPE_MODE (TREE_TYPE (def)));
   if (count >= threshold)
     {
+      tree use_stmt;
       for (occ = occ_head; occ; occ = occ->next)
         {
          compute_merit (occ);
          insert_reciprocals (def_bsi, occ, def, NULL, threshold);
         }
 
-      FOR_EACH_IMM_USE_SAFE (use_p, use_iter, def)
+      FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, def)
         {
-          tree use_stmt = USE_STMT (use_p);
           if (is_division_by (use_stmt, def))
-            replace_reciprocal (use_p);
+            {
+              FOR_EACH_IMM_USE_ON_STMT (use_p, use_iter)
+                replace_reciprocal (use_p);
+            }
         }
     }
 
gcc/tree-ssa-operands.c
@@ -325,49 +325,9 @@ ssa_operand_alloc (unsigned size)
 }
 
 
-/* Make sure PTR is in the correct immediate use list.  Since uses are simply
-   pointers into the stmt TREE, there is no way of telling if anyone has
-   changed what this pointer points to via TREE_OPERANDS (exp, 0) = <...>.
-   The contents are different, but the pointer is still the same.  This
-   routine will check to make sure PTR is in the correct list, and if it isn't
-   put it in the correct list.  We cannot simply check the previous node
-   because all nodes in the same stmt might have be changed.  */
-
-static inline void
-correct_use_link (use_operand_p ptr, tree stmt)
-{
-  use_operand_p prev;
-  tree root;
-
-  /* fold_stmt may have changed the stmt pointers.  */
-  if (ptr->stmt != stmt)
-    ptr->stmt = stmt;
-
-  prev = ptr->prev;
-  if (prev)
-    {
-      /* Find the root element, making sure we skip any safe iterators.  */
-      while (prev->use != NULL || prev->stmt == NULL)
-        prev = prev->prev;
-
-      /* Get the SSA_NAME of the list the node is in.  */
-      root = prev->stmt;
-
-      /* If it's the right list, simply return.  */
-      if (root == *(ptr->use))
-        return;
-    }
-
-  /* It is in the wrong list if we reach here.  */
-  delink_imm_use (ptr);
-  link_imm_use (ptr, *(ptr->use));
-}
-
-
 /* This routine makes sure that PTR is in an immediate use list, and makes
-   sure the stmt pointer is set to the current stmt.  Virtual uses do not need
-   the overhead of correct_use_link since they cannot be directly manipulated
-   like a real use can be.  (They don't exist in the TREE_OPERAND nodes.)  */
+   sure the stmt pointer is set to the current stmt.  */
 
 static inline void
 set_virtual_use_link (use_operand_p ptr, tree stmt)
@@ -579,9 +539,7 @@ finalize_ssa_defs (tree stmt)
 }
 
 /* Takes elements from build_uses and turns them into use operands of STMT.
-   TODO -- Given that use operands list is not necessarily sorted, merging
-           the operands this way does not make much sense.
-        -- Make build_uses VEC of tree *.  */
+   TODO -- Make build_uses VEC of tree *.  */
 
 static inline void
 finalize_ssa_use_ops (tree stmt)
@@ -589,46 +547,12 @@ finalize_ssa_use_ops (tree stmt)
   unsigned new_i;
   struct use_optype_d new_list;
   use_optype_p old_ops, ptr, last;
-  tree *old_base, *new_base;
 
   new_list.next = NULL;
   last = &new_list;
 
   old_ops = USE_OPS (stmt);
 
-  new_i = 0;
-  while (old_ops && new_i < VEC_length (tree, build_uses))
-    {
-      new_base = (tree *) VEC_index (tree, build_uses, new_i);
-      old_base = USE_OP_PTR (old_ops)->use;
-
-      if (old_base == new_base)
-        {
-          /* if variables are the same, reuse this node.  */
-          MOVE_HEAD_AFTER (old_ops, last);
-          correct_use_link (USE_OP_PTR (last), stmt);
-          new_i++;
-        }
-      else if (old_base < new_base)
-        {
-          /* if old is less than new, old goes to the free list.  */
-          delink_imm_use (USE_OP_PTR (old_ops));
-          MOVE_HEAD_TO_FREELIST (old_ops, use);
-        }
-      else
-        {
-          /* This is a new operand.  */
-          add_use_op (stmt, new_base, &last);
-          new_i++;
-        }
-    }
-
-  /* If there is anything remaining in the build_uses list, simply emit it.  */
-  for ( ; new_i < VEC_length (tree, build_uses); new_i++)
-    add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
-
-  last->next = NULL;
-
   /* If there is anything in the old list, free it.  */
   if (old_ops)
     {
@@ -638,6 +562,12 @@ finalize_ssa_use_ops (tree stmt)
       free_uses = old_ops;
     }
 
+  /* Now create nodes for all the new nodes.  */
+  for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
+    add_use_op (stmt, (tree *) VEC_index (tree, build_uses, new_i), &last);
+
+  last->next = NULL;
+
   /* Now set the stmt's operands.  */
   USE_OPS (stmt) = new_list.next;
 
gcc/tree-ssa-threadedge.c
@@ -84,18 +84,20 @@ static tree
 lhs_of_dominating_assert (tree op, basic_block bb, tree stmt)
 {
   imm_use_iterator imm_iter;
-  use_operand_p imm_use;
+  tree use_stmt;
+  use_operand_p use_p;
 
-  FOR_EACH_IMM_USE_SAFE (imm_use, imm_iter, op)
+  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
     {
-      tree use_stmt = USE_STMT (imm_use);
-
+      use_stmt = USE_STMT (use_p);
       if (use_stmt != stmt
           && TREE_CODE (use_stmt) == MODIFY_EXPR
           && TREE_CODE (TREE_OPERAND (use_stmt, 1)) == ASSERT_EXPR
          && TREE_OPERAND (TREE_OPERAND (use_stmt, 1), 0) == op
          && dominated_by_p (CDI_DOMINATORS, bb, bb_for_stmt (use_stmt)))
-        op = TREE_OPERAND (use_stmt, 0);
+        {
+          return TREE_OPERAND (use_stmt, 0);
+        }
     }
   return op;
 }
gcc/tree-vect-transform.c
@@ -791,6 +791,7 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
   bool extract_scalar_result;
   tree reduction_op;
   tree orig_stmt;
+  tree use_stmt;
   tree operation = TREE_OPERAND (stmt, 1);
   int op_type;
 
@@ -1082,8 +1083,9 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
           gcc_assert (exit_phi);
           /* Replace the uses:  */
           orig_name = PHI_RESULT (exit_phi);
-          FOR_EACH_IMM_USE_SAFE (use_p, imm_iter, orig_name)
-            SET_USE (use_p, new_temp);
+          FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, orig_name)
+            FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
+              SET_USE (use_p, new_temp);
         }
 
 
gcc/tree-vrp.c
@@ -3242,6 +3242,7 @@ remove_range_assertions (void)
       for (si = bsi_start (bb); !bsi_end_p (si);)
         {
           tree stmt = bsi_stmt (si);
+          tree use_stmt;
 
           if (TREE_CODE (stmt) == MODIFY_EXPR
               && TREE_CODE (TREE_OPERAND (stmt, 1)) == ASSERT_EXPR)
@@ -3255,11 +3256,12 @@ remove_range_assertions (void)
 
               /* Propagate the RHS into every use of the LHS.  */
               var = ASSERT_EXPR_VAR (rhs);
-              FOR_EACH_IMM_USE_SAFE (use_p, iter, TREE_OPERAND (stmt, 0))
-                {
-                  SET_USE (use_p, var);
-                  gcc_assert (TREE_CODE (var) == SSA_NAME);
-                }
+              FOR_EACH_IMM_USE_STMT (use_stmt, iter, TREE_OPERAND (stmt, 0))
+                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+                  {
+                    SET_USE (use_p, var);
+                    gcc_assert (TREE_CODE (var) == SSA_NAME);
+                  }
 
               /* And finally, remove the copy, it is not needed.  */
               bsi_remove (&si, true);