tree-vectorizer.h (vect_is_simple_use): Remove unused parameters.

2015-10-14  Richard Biener  <rguenther@suse.de>

	* tree-vectorizer.h (vect_is_simple_use): Remove unused parameters.
	(vect_is_simple_use_1): Likewise.  Make overload of vect_is_simple_use.
	(vect_get_vec_def_for_operand): Remove unused parameter.
	* tree-vect-loop.c (get_initial_def_for_induction): Adjust.
	(vect_create_epilog_for_reduction): Likewise.
	(vectorizable_reduction): Likewise.
	(vectorizable_live_operation): Likewise.
	* tree-vect-patterns.c (type_conversion_p): Likewise.
	(vect_recog_vector_vector_shift_pattern): Likewise.
	(check_bool_pattern): Likewise.
	* tree-vect-slp.c (vect_get_and_check_slp_defs): Likewise.
	(vect_analyze_slp_cost_1): Likewise.
	* tree-vect-stmts.c (process_use): Likewise.
	(vect_get_vec_def_for_operand): Do not handle reductions.
	(vect_get_vec_defs): Adjust.
	(vectorizable_mask_load_store): Likewise.
	(vectorizable_call): Likewise.
	(vectorizable_simd_clone_call): Likewise.
	(vect_get_loop_based_defs): Likewise.
	(vectorizable_conversion): Likewise.
	(vectorizable_assignment): Likewise.
	(vectorizable_shift): Likewise.
	(vectorizable_operation): Likewise.
	(vectorizable_store): Likewise.
	(vectorizable_load): Likewise.
	(vect_is_simple_cond): Likewise.
	(vectorizable_condition): Likewise.
	(vect_is_simple_use): Remove unused parameters.
	(vect_is_simple_use_1): Adjust and rename.

From-SVN: r228811
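In summary, the interface change (taken from the gcc/tree-vectorizer.h hunk at the end of the diff) drops the unused use-stmt and scalar-def arguments and folds vect_is_simple_use_1 into an overload of vect_is_simple_use:

/* Old declarations.  */
extern bool vect_is_simple_use (tree, gimple *, vec_info *, gimple **,
				tree *, enum vect_def_type *);
extern bool vect_is_simple_use_1 (tree, gimple *, vec_info *, gimple **,
				  tree *, enum vect_def_type *, tree *);
extern tree vect_get_vec_def_for_operand (tree, gimple *, tree *);

/* New declarations: the second vect_is_simple_use overload additionally
   returns the vector type of the operand.  */
extern bool vect_is_simple_use (tree, vec_info *, gimple **,
				enum vect_def_type *);
extern bool vect_is_simple_use (tree, vec_info *, gimple **,
				enum vect_def_type *, tree *);
extern tree vect_get_vec_def_for_operand (tree, gimple *);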
Richard Biener <rguenther@suse.de>, 2015-10-14 13:57:09 +00:00 (committed by Richard Biener)
commit 81c4024185, parent c3bea07622
6 changed files with 177 additions and 278 deletions

gcc/ChangeLog

@@ -1,3 +1,35 @@
2015-10-14  Richard Biener  <rguenther@suse.de>

	* tree-vectorizer.h (vect_is_simple_use): Remove unused parameters.
	(vect_is_simple_use_1): Likewise.  Make overload of vect_is_simple_use.
	(vect_get_vec_def_for_operand): Remove unused parameter.
	* tree-vect-loop.c (get_initial_def_for_induction): Adjust.
	(vect_create_epilog_for_reduction): Likewise.
	(vectorizable_reduction): Likewise.
	(vectorizable_live_operation): Likewise.
	* tree-vect-patterns.c (type_conversion_p): Likewise.
	(vect_recog_vector_vector_shift_pattern): Likewise.
	(check_bool_pattern): Likewise.
	* tree-vect-slp.c (vect_get_and_check_slp_defs): Likewise.
	(vect_analyze_slp_cost_1): Likewise.
	* tree-vect-stmts.c (process_use): Likewise.
	(vect_get_vec_def_for_operand): Do not handle reductions.
	(vect_get_vec_defs): Adjust.
	(vectorizable_mask_load_store): Likewise.
	(vectorizable_call): Likewise.
	(vectorizable_simd_clone_call): Likewise.
	(vect_get_loop_based_defs): Likewise.
	(vectorizable_conversion): Likewise.
	(vectorizable_assignment): Likewise.
	(vectorizable_shift): Likewise.
	(vectorizable_operation): Likewise.
	(vectorizable_store): Likewise.
	(vectorizable_load): Likewise.
	(vect_is_simple_cond): Likewise.
	(vectorizable_condition): Likewise.
	(vect_is_simple_use): Remove unused parameters.
	(vect_is_simple_use_1): Adjust and rename.

2015-10-14  Richard Biener  <rguenther@suse.de>

	PR tree-optimization/67915

gcc/tree-vect-loop.c

@ -3412,7 +3412,7 @@ get_initial_def_for_induction (gimple *iv_phi)
/* iv_loop is nested in the loop to be vectorized. init_expr had already
been created during vectorization of previous stmts. We obtain it
from the STMT_VINFO_VEC_STMT of the defining stmt. */
vec_init = vect_get_vec_def_for_operand (init_expr, iv_phi, NULL);
vec_init = vect_get_vec_def_for_operand (init_expr, iv_phi);
/* If the initial value is not of proper type, convert it. */
if (!useless_type_conversion_p (vectype, TREE_TYPE (vec_init)))
{
@ -3798,8 +3798,7 @@ get_initial_def_for_reduction (gimple *stmt, tree init_val,
if (adjustment_def)
{
if (nested_in_vect_loop)
*adjustment_def = vect_get_vec_def_for_operand (init_val, stmt,
NULL);
*adjustment_def = vect_get_vec_def_for_operand (init_val, stmt);
else
*adjustment_def = init_val;
}
@ -3853,7 +3852,7 @@ get_initial_def_for_reduction (gimple *stmt, tree init_val,
if (adjustment_def)
{
*adjustment_def = NULL_TREE;
init_def = vect_get_vec_def_for_operand (init_val, stmt, NULL);
init_def = vect_get_vec_def_for_operand (init_val, stmt);
break;
}
@ -4012,12 +4011,13 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple *stmt,
NULL, slp_node, reduc_index);
else
{
/* Get at the scalar def before the loop, that defines the initial value
of the reduction variable. */
gimple *def_stmt = SSA_NAME_DEF_STMT (reduction_op);
tree op = PHI_ARG_DEF_FROM_EDGE (def_stmt, loop_preheader_edge (loop));
vec_initial_defs.create (1);
/* For the case of reduction, vect_get_vec_def_for_operand returns
the scalar def before the loop, that defines the initial value
of the reduction variable. */
vec_initial_def = vect_get_vec_def_for_operand (reduction_op, stmt,
&adjustment_def);
vec_initial_def = get_initial_def_for_reduction (stmt, op,
&adjustment_def);
vec_initial_defs.quick_push (vec_initial_def);
}
@ -4800,7 +4800,6 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
int op_type;
optab optab, reduc_optab;
tree new_temp = NULL_TREE;
tree def;
gimple *def_stmt;
enum vect_def_type dt;
gphi *new_phi = NULL;
@ -4956,8 +4955,8 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
if (i == 0 && code == COND_EXPR)
continue;
is_simple_use = vect_is_simple_use_1 (ops[i], stmt, loop_vinfo,
&def_stmt, &def, &dt, &tem);
is_simple_use = vect_is_simple_use (ops[i], loop_vinfo,
&def_stmt, &dt, &tem);
if (!vectype_in)
vectype_in = tem;
gcc_assert (is_simple_use);
@ -4977,8 +4976,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
}
}
is_simple_use = vect_is_simple_use_1 (ops[i], stmt, loop_vinfo,
&def_stmt, &def, &dt, &tem);
is_simple_use = vect_is_simple_use (ops[i], loop_vinfo, &def_stmt, &dt, &tem);
if (!vectype_in)
vectype_in = tem;
gcc_assert (is_simple_use);
@ -5340,12 +5338,11 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
else
{
loop_vec_def0 = vect_get_vec_def_for_operand (ops[!reduc_index],
stmt, NULL);
stmt);
vec_oprnds0.quick_push (loop_vec_def0);
if (op_type == ternary_op)
{
loop_vec_def1 = vect_get_vec_def_for_operand (op1, stmt,
NULL);
loop_vec_def1 = vect_get_vec_def_for_operand (op1, stmt);
vec_oprnds1.quick_push (loop_vec_def1);
}
}
@ -5356,17 +5353,15 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
{
enum vect_def_type dt;
gimple *dummy_stmt;
tree dummy;
vect_is_simple_use (ops[!reduc_index], stmt, loop_vinfo,
&dummy_stmt, &dummy, &dt);
vect_is_simple_use (ops[!reduc_index], loop_vinfo,
&dummy_stmt, &dt);
loop_vec_def0 = vect_get_vec_def_for_stmt_copy (dt,
loop_vec_def0);
vec_oprnds0[0] = loop_vec_def0;
if (op_type == ternary_op)
{
vect_is_simple_use (op1, stmt, loop_vinfo, &dummy_stmt,
&dummy, &dt);
vect_is_simple_use (op1, loop_vinfo, &dummy_stmt, &dt);
loop_vec_def1 = vect_get_vec_def_for_stmt_copy (dt,
loop_vec_def1);
vec_oprnds1[0] = loop_vec_def1;
@ -5595,7 +5590,6 @@ vectorizable_live_operation (gimple *stmt,
int i;
int op_type;
tree op;
tree def;
gimple *def_stmt;
enum vect_def_type dt;
enum tree_code code;
@ -5667,7 +5661,7 @@ vectorizable_live_operation (gimple *stmt,
else
op = gimple_op (stmt, i + 1);
if (op
&& !vect_is_simple_use (op, stmt, loop_vinfo, &def_stmt, &def, &dt))
&& !vect_is_simple_use (op, loop_vinfo, &def_stmt, &dt))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
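Because vect_get_vec_def_for_operand no longer handles reduction defs (that case now hits gcc_unreachable in the tree-vect-stmts.c hunk further down), vect_create_epilog_for_reduction obtains the initial value itself. A condensed sketch of the new non-SLP path, assembled from the hunk above:

/* Get the scalar def before the loop that defines the initial value of
   the reduction and expand it directly, instead of going through
   vect_get_vec_def_for_operand.  */
gimple *def_stmt = SSA_NAME_DEF_STMT (reduction_op);
tree op = PHI_ARG_DEF_FROM_EDGE (def_stmt, loop_preheader_edge (loop));
vec_initial_defs.create (1);
vec_initial_def = get_initial_def_for_reduction (stmt, op, &adjustment_def);
vec_initial_defs.quick_push (vec_initial_def);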

gcc/tree-vect-patterns.c

@ -169,17 +169,14 @@ static bool
type_conversion_p (tree name, gimple *use_stmt, bool check_sign,
tree *orig_type, gimple **def_stmt, bool *promotion)
{
tree dummy;
gimple *dummy_gimple;
stmt_vec_info stmt_vinfo;
tree type = TREE_TYPE (name);
tree oprnd0;
enum vect_def_type dt;
tree def;
stmt_vinfo = vinfo_for_stmt (use_stmt);
if (!vect_is_simple_use (name, use_stmt, stmt_vinfo->vinfo, def_stmt,
&def, &dt))
if (!vect_is_simple_use (name, stmt_vinfo->vinfo, def_stmt, &dt))
return false;
if (dt != vect_internal_def
@ -207,8 +204,7 @@ type_conversion_p (tree name, gimple *use_stmt, bool check_sign,
else
*promotion = false;
if (!vect_is_simple_use (oprnd0, *def_stmt, stmt_vinfo->vinfo,
&dummy_gimple, &dummy, &dt))
if (!vect_is_simple_use (oprnd0, stmt_vinfo->vinfo, &dummy_gimple, &dt))
return false;
return true;
@ -1830,7 +1826,7 @@ vect_recog_rotate_pattern (vec<gimple *> *stmts, tree *type_in, tree *type_out)
|| !TYPE_UNSIGNED (type))
return NULL;
if (!vect_is_simple_use (oprnd1, last_stmt, vinfo, &def_stmt, &def, &dt))
if (!vect_is_simple_use (oprnd1, vinfo, &def_stmt, &dt))
return NULL;
if (dt != vect_internal_def
@ -2058,7 +2054,6 @@ vect_recog_vector_vector_shift_pattern (vec<gimple *> *stmts,
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
vec_info *vinfo = stmt_vinfo->vinfo;
enum vect_def_type dt;
tree def;
if (!is_gimple_assign (last_stmt))
return NULL;
@ -2090,8 +2085,7 @@ vect_recog_vector_vector_shift_pattern (vec<gimple *> *stmts,
!= TYPE_PRECISION (TREE_TYPE (oprnd0)))
return NULL;
if (!vect_is_simple_use (oprnd1, last_stmt, vinfo, &def_stmt,
&def, &dt))
if (!vect_is_simple_use (oprnd1, vinfo, &def_stmt, &dt))
return NULL;
if (dt != vect_internal_def)
@ -2102,7 +2096,7 @@ vect_recog_vector_vector_shift_pattern (vec<gimple *> *stmts,
if (*type_in == NULL_TREE)
return NULL;
def = NULL_TREE;
tree def = NULL_TREE;
if (gimple_assign_cast_p (def_stmt))
{
tree rhs1 = gimple_assign_rhs1 (def_stmt);
@ -2892,11 +2886,10 @@ check_bool_pattern (tree var, vec_info *vinfo)
{
gimple *def_stmt;
enum vect_def_type dt;
tree def, rhs1;
tree rhs1;
enum tree_code rhs_code;
if (!vect_is_simple_use (var, NULL, vinfo, &def_stmt, &def,
&dt))
if (!vect_is_simple_use (var, vinfo, &def_stmt, &dt))
return false;
if (dt != vect_internal_def)
@ -2905,7 +2898,7 @@ check_bool_pattern (tree var, vec_info *vinfo)
if (!is_gimple_assign (def_stmt))
return false;
if (!has_single_use (def))
if (!has_single_use (var))
return false;
rhs1 = gimple_assign_rhs1 (def_stmt);

gcc/tree-vect-slp.c

@ -234,7 +234,6 @@ vect_get_and_check_slp_defs (vec_info *vinfo,
{
tree oprnd;
unsigned int i, number_of_oprnds;
tree def;
gimple *def_stmt;
enum vect_def_type dt = vect_uninitialized_def;
struct loop *loop = NULL;
@ -287,8 +286,7 @@ again:
oprnd_info = (*oprnds_info)[i];
if (!vect_is_simple_use (oprnd, NULL, vinfo, &def_stmt,
&def, &dt))
if (!vect_is_simple_use (oprnd, vinfo, &def_stmt, &dt))
{
if (dump_enabled_p ())
{
@ -355,19 +353,15 @@ again:
switch (gimple_code (def_stmt))
{
case GIMPLE_PHI:
def = gimple_phi_result (def_stmt);
break;
case GIMPLE_PHI:
case GIMPLE_ASSIGN:
break;
case GIMPLE_ASSIGN:
def = gimple_assign_lhs (def_stmt);
break;
default:
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"unsupported defining stmt:\n");
return -1;
default:
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"unsupported defining stmt:\n");
return -1;
}
}
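The removed and added arms of the switch above are interleaved by the diff view; with the scalar DEF variable gone, the new switch in vect_get_and_check_slp_defs reduces to:

	  switch (gimple_code (def_stmt))
	    {
	    case GIMPLE_PHI:
	    case GIMPLE_ASSIGN:
	      break;

	    default:
	      if (dump_enabled_p ())
		dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
				 "unsupported defining stmt:\n");
	      return -1;
	    }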
@ -432,7 +426,7 @@ again:
{
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"Build SLP failed: illegal type of def ");
dump_generic_expr (MSG_MISSED_OPTIMIZATION, TDF_SLIM, def);
dump_generic_expr (MSG_MISSED_OPTIMIZATION, TDF_SLIM, oprnd);
dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
}
@ -1555,12 +1549,12 @@ vect_analyze_slp_cost_1 (slp_instance instance, slp_tree node,
lhs = gimple_get_lhs (stmt);
for (i = 0; i < gimple_num_ops (stmt); ++i)
{
tree def, op = gimple_op (stmt, i);
tree op = gimple_op (stmt, i);
gimple *def_stmt;
enum vect_def_type dt;
if (!op || op == lhs)
continue;
if (vect_is_simple_use (op, NULL, stmt_info->vinfo, &def_stmt, &def, &dt))
if (vect_is_simple_use (op, stmt_info->vinfo, &def_stmt, &dt))
{
/* Without looking at the actual initializer a vector of
constants can be implemented as load from the constant pool.

gcc/tree-vect-stmts.c

@ -450,7 +450,6 @@ process_use (gimple *stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
stmt_vec_info dstmt_vinfo;
basic_block bb, def_bb;
tree def;
gimple *def_stmt;
enum vect_def_type dt;
@ -459,7 +458,7 @@ process_use (gimple *stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
if (!force && !exist_non_indexing_operands_for_use_p (use, stmt))
return true;
if (!vect_is_simple_use (use, stmt, loop_vinfo, &def_stmt, &def, &dt))
if (!vect_is_simple_use (use, loop_vinfo, &def_stmt, &dt))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -1342,16 +1341,14 @@ vect_init_vector (gimple *stmt, tree val, tree type, gimple_stmt_iterator *gsi)
needs to be introduced. */
tree
vect_get_vec_def_for_operand (tree op, gimple *stmt, tree *scalar_def)
vect_get_vec_def_for_operand (tree op, gimple *stmt)
{
tree vec_oprnd;
gimple *vec_stmt;
gimple *def_stmt;
stmt_vec_info def_stmt_info = NULL;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
unsigned int nunits;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
tree def;
enum vect_def_type dt;
bool is_simple_use;
tree vector_type;
@ -1364,19 +1361,11 @@ vect_get_vec_def_for_operand (tree op, gimple *stmt, tree *scalar_def)
dump_printf (MSG_NOTE, "\n");
}
is_simple_use = vect_is_simple_use (op, stmt, loop_vinfo,
&def_stmt, &def, &dt);
is_simple_use = vect_is_simple_use (op, loop_vinfo, &def_stmt, &dt);
gcc_assert (is_simple_use);
if (dump_enabled_p ())
{
int loc_printed = 0;
if (def)
{
dump_printf_loc (MSG_NOTE, vect_location, "def = ");
loc_printed = 1;
dump_generic_expr (MSG_NOTE, TDF_SLIM, def);
dump_printf (MSG_NOTE, "\n");
}
if (def_stmt)
{
if (loc_printed)
@ -1389,46 +1378,18 @@ vect_get_vec_def_for_operand (tree op, gimple *stmt, tree *scalar_def)
switch (dt)
{
/* Case 1: operand is a constant. */
/* operand is a constant or a loop invariant. */
case vect_constant_def:
case vect_external_def:
{
vector_type = get_vectype_for_scalar_type (TREE_TYPE (op));
gcc_assert (vector_type);
nunits = TYPE_VECTOR_SUBPARTS (vector_type);
if (scalar_def)
*scalar_def = op;
/* Create 'vect_cst_ = {cst,cst,...,cst}' */
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
"Create vector_cst. nunits = %d\n", nunits);
return vect_init_vector (stmt, op, vector_type, NULL);
}
/* Case 2: operand is defined outside the loop - loop invariant. */
case vect_external_def:
{
vector_type = get_vectype_for_scalar_type (TREE_TYPE (def));
gcc_assert (vector_type);
if (scalar_def)
*scalar_def = def;
/* Create 'vec_inv = {inv,inv,..,inv}' */
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location, "Create vector_inv.\n");
return vect_init_vector (stmt, def, vector_type, NULL);
}
/* Case 3: operand is defined inside the loop. */
/* operand is defined inside the loop. */
case vect_internal_def:
{
if (scalar_def)
*scalar_def = NULL/* FIXME tuples: def_stmt*/;
/* Get the def from the vectorized stmt. */
def_stmt_info = vinfo_for_stmt (def_stmt);
@ -1449,22 +1410,14 @@ vect_get_vec_def_for_operand (tree op, gimple *stmt, tree *scalar_def)
return vec_oprnd;
}
/* Case 4: operand is defined by a loop header phi - reduction */
/* operand is defined by a loop header phi - reduction */
case vect_reduction_def:
case vect_double_reduction_def:
case vect_nested_cycle:
{
struct loop *loop;
/* Code should use get_initial_def_for_reduction. */
gcc_unreachable ();
gcc_assert (gimple_code (def_stmt) == GIMPLE_PHI);
loop = (gimple_bb (def_stmt))->loop_father;
/* Get the def before the loop */
op = PHI_ARG_DEF_FROM_EDGE (def_stmt, loop_preheader_edge (loop));
return get_initial_def_for_reduction (stmt, op, scalar_def);
}
/* Case 5: operand is defined by loop-header phi - induction. */
/* operand is defined by loop-header phi - induction. */
case vect_induction_def:
{
gcc_assert (gimple_code (def_stmt) == GIMPLE_PHI);
@ -1618,13 +1571,13 @@ vect_get_vec_defs (tree op0, tree op1, gimple *stmt,
tree vec_oprnd;
vec_oprnds0->create (1);
vec_oprnd = vect_get_vec_def_for_operand (op0, stmt, NULL);
vec_oprnd = vect_get_vec_def_for_operand (op0, stmt);
vec_oprnds0->quick_push (vec_oprnd);
if (op1)
{
vec_oprnds1->create (1);
vec_oprnd = vect_get_vec_def_for_operand (op1, stmt, NULL);
vec_oprnd = vect_get_vec_def_for_operand (op1, stmt);
vec_oprnds1->quick_push (vec_oprnd);
}
}
@ -1753,7 +1706,6 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
bool is_store;
tree mask;
gimple *def_stmt;
tree def;
enum vect_def_type dt;
if (slp_node != NULL)
@ -1797,13 +1749,11 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
{
gimple *def_stmt;
tree def;
gather_decl = vect_check_gather_scatter (stmt, loop_vinfo, &gather_base,
&gather_off, &gather_scale);
gcc_assert (gather_decl);
if (!vect_is_simple_use_1 (gather_off, NULL, loop_vinfo,
&def_stmt, &def, &gather_dt,
&gather_off_vectype))
if (!vect_is_simple_use (gather_off, loop_vinfo, &def_stmt, &gather_dt,
&gather_off_vectype))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -1833,15 +1783,13 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
if (TREE_CODE (mask) != SSA_NAME)
return false;
if (!vect_is_simple_use (mask, stmt, loop_vinfo,
&def_stmt, &def, &dt))
if (!vect_is_simple_use (mask, loop_vinfo, &def_stmt, &dt))
return false;
if (is_store)
{
tree rhs = gimple_call_arg (stmt, 3);
if (!vect_is_simple_use (rhs, stmt, loop_vinfo,
&def_stmt, &def, &dt))
if (!vect_is_simple_use (rhs, loop_vinfo, &def_stmt, &dt))
return false;
}
@ -1931,7 +1879,7 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
perm_mask, stmt, gsi);
else if (j == 0)
op = vec_oprnd0
= vect_get_vec_def_for_operand (gather_off, stmt, NULL);
= vect_get_vec_def_for_operand (gather_off, stmt);
else
op = vec_oprnd0
= vect_get_vec_def_for_stmt_copy (gather_dt, vec_oprnd0);
@ -1955,11 +1903,10 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
else
{
if (j == 0)
vec_mask = vect_get_vec_def_for_operand (mask, stmt, NULL);
vec_mask = vect_get_vec_def_for_operand (mask, stmt);
else
{
vect_is_simple_use (vec_mask, NULL, loop_vinfo,
&def_stmt, &def, &dt);
vect_is_simple_use (vec_mask, loop_vinfo, &def_stmt, &dt);
vec_mask = vect_get_vec_def_for_stmt_copy (dt, vec_mask);
}
@ -2043,8 +1990,8 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
if (i == 0)
{
tree rhs = gimple_call_arg (stmt, 3);
vec_rhs = vect_get_vec_def_for_operand (rhs, stmt, NULL);
vec_mask = vect_get_vec_def_for_operand (mask, stmt, NULL);
vec_rhs = vect_get_vec_def_for_operand (rhs, stmt);
vec_mask = vect_get_vec_def_for_operand (mask, stmt);
/* We should have catched mismatched types earlier. */
gcc_assert (useless_type_conversion_p (vectype,
TREE_TYPE (vec_rhs)));
@ -2055,11 +2002,9 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
}
else
{
vect_is_simple_use (vec_rhs, NULL, loop_vinfo, &def_stmt,
&def, &dt);
vect_is_simple_use (vec_rhs, loop_vinfo, &def_stmt, &dt);
vec_rhs = vect_get_vec_def_for_stmt_copy (dt, vec_rhs);
vect_is_simple_use (vec_mask, NULL, loop_vinfo, &def_stmt,
&def, &dt);
vect_is_simple_use (vec_mask, loop_vinfo, &def_stmt, &dt);
vec_mask = vect_get_vec_def_for_stmt_copy (dt, vec_mask);
dataref_ptr = bump_vector_ptr (dataref_ptr, ptr_incr, gsi, stmt,
TYPE_SIZE_UNIT (vectype));
@ -2100,7 +2045,7 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
if (i == 0)
{
vec_mask = vect_get_vec_def_for_operand (mask, stmt, NULL);
vec_mask = vect_get_vec_def_for_operand (mask, stmt);
dataref_ptr = vect_create_data_ref_ptr (stmt, vectype, NULL,
NULL_TREE, &dummy, gsi,
&ptr_incr, false, &inv_p);
@ -2108,8 +2053,7 @@ vectorizable_mask_load_store (gimple *stmt, gimple_stmt_iterator *gsi,
}
else
{
vect_is_simple_use (vec_mask, NULL, loop_vinfo, &def_stmt,
&def, &dt);
vect_is_simple_use (vec_mask, loop_vinfo, &def_stmt, &dt);
vec_mask = vect_get_vec_def_for_stmt_copy (dt, vec_mask);
dataref_ptr = bump_vector_ptr (dataref_ptr, ptr_incr, gsi, stmt,
TYPE_SIZE_UNIT (vectype));
@ -2180,7 +2124,7 @@ vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
vec_info *vinfo = stmt_info->vinfo;
tree fndecl, new_temp, def, rhs_type;
tree fndecl, new_temp, rhs_type;
gimple *def_stmt;
enum vect_def_type dt[3]
= {vect_unknown_def_type, vect_unknown_def_type, vect_unknown_def_type};
@ -2253,8 +2197,7 @@ vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
if (!rhs_type)
rhs_type = TREE_TYPE (op);
if (!vect_is_simple_use_1 (op, stmt, vinfo,
&def_stmt, &def, &dt[i], &opvectype))
if (!vect_is_simple_use (op, vinfo, &def_stmt, &dt[i], &opvectype))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -2416,7 +2359,7 @@ vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
op = gimple_call_arg (stmt, i);
if (j == 0)
vec_oprnd0
= vect_get_vec_def_for_operand (op, stmt, NULL);
= vect_get_vec_def_for_operand (op, stmt);
else
{
vec_oprnd0 = gimple_call_arg (new_stmt, i);
@ -2514,7 +2457,7 @@ vectorizable_call (gimple *gs, gimple_stmt_iterator *gsi, gimple **vec_stmt,
if (j == 0)
{
vec_oprnd0
= vect_get_vec_def_for_operand (op, stmt, NULL);
= vect_get_vec_def_for_operand (op, stmt);
vec_oprnd1
= vect_get_vec_def_for_stmt_copy (dt[i], vec_oprnd0);
}
@ -2705,7 +2648,7 @@ vectorizable_simd_clone_call (gimple *stmt, gimple_stmt_iterator *gsi,
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
vec_info *vinfo = stmt_info->vinfo;
struct loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo) : NULL;
tree fndecl, new_temp, def;
tree fndecl, new_temp;
gimple *def_stmt;
gimple *new_stmt = NULL;
int ncopies, j;
@ -2768,9 +2711,8 @@ vectorizable_simd_clone_call (gimple *stmt, gimple_stmt_iterator *gsi,
thisarginfo.simd_lane_linear = false;
op = gimple_call_arg (stmt, i);
if (!vect_is_simple_use_1 (op, stmt, vinfo,
&def_stmt, &def, &thisarginfo.dt,
&thisarginfo.vectype)
if (!vect_is_simple_use (op, vinfo, &def_stmt, &thisarginfo.dt,
&thisarginfo.vectype)
|| thisarginfo.dt == vect_uninitialized_def)
{
if (dump_enabled_p ())
@ -3047,7 +2989,7 @@ vectorizable_simd_clone_call (gimple *stmt, gimple_stmt_iterator *gsi,
gcc_assert ((k & (k - 1)) == 0);
if (m == 0)
vec_oprnd0
= vect_get_vec_def_for_operand (op, stmt, NULL);
= vect_get_vec_def_for_operand (op, stmt);
else
{
vec_oprnd0 = arginfo[i].op;
@ -3081,7 +3023,7 @@ vectorizable_simd_clone_call (gimple *stmt, gimple_stmt_iterator *gsi,
{
if (m == 0 && l == 0)
vec_oprnd0
= vect_get_vec_def_for_operand (op, stmt, NULL);
= vect_get_vec_def_for_operand (op, stmt);
else
vec_oprnd0
= vect_get_vec_def_for_stmt_copy (arginfo[i].dt,
@ -3395,7 +3337,7 @@ vect_get_loop_based_defs (tree *oprnd, gimple *stmt, enum vect_def_type dt,
/* All the vector operands except the very first one (that is scalar oprnd)
are stmt copies. */
if (TREE_CODE (TREE_TYPE (*oprnd)) != VECTOR_TYPE)
vec_oprnd = vect_get_vec_def_for_operand (*oprnd, stmt, NULL);
vec_oprnd = vect_get_vec_def_for_operand (*oprnd, stmt);
else
vec_oprnd = vect_get_vec_def_for_stmt_copy (dt, *oprnd);
@ -3555,7 +3497,6 @@ vectorizable_conversion (gimple *stmt, gimple_stmt_iterator *gsi,
enum tree_code codecvt1 = ERROR_MARK, codecvt2 = ERROR_MARK;
tree decl1 = NULL_TREE, decl2 = NULL_TREE;
tree new_temp;
tree def;
gimple *def_stmt;
enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
gimple *new_stmt = NULL;
@ -3633,8 +3574,7 @@ vectorizable_conversion (gimple *stmt, gimple_stmt_iterator *gsi,
}
/* Check the operands of the operation. */
if (!vect_is_simple_use_1 (op0, stmt, vinfo,
&def_stmt, &def, &dt[0], &vectype_in))
if (!vect_is_simple_use (op0, vinfo, &def_stmt, &dt[0], &vectype_in))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -3650,11 +3590,9 @@ vectorizable_conversion (gimple *stmt, gimple_stmt_iterator *gsi,
/* For WIDEN_MULT_EXPR, if OP0 is a constant, use the type of
OP1. */
if (CONSTANT_CLASS_P (op0))
ok = vect_is_simple_use_1 (op1, stmt, vinfo,
&def_stmt, &def, &dt[1], &vectype_in);
ok = vect_is_simple_use (op1, vinfo, &def_stmt, &dt[1], &vectype_in);
else
ok = vect_is_simple_use (op1, stmt, vinfo, &def_stmt,
&def, &dt[1]);
ok = vect_is_simple_use (op1, vinfo, &def_stmt, &dt[1]);
if (!ok)
{
@ -3971,15 +3909,14 @@ vectorizable_conversion (gimple *stmt, gimple_stmt_iterator *gsi,
}
else
{
vec_oprnd0 = vect_get_vec_def_for_operand (op0, stmt, NULL);
vec_oprnd0 = vect_get_vec_def_for_operand (op0, stmt);
vec_oprnds0.quick_push (vec_oprnd0);
if (op_type == binary_op)
{
if (code == WIDEN_LSHIFT_EXPR)
vec_oprnd1 = op1;
else
vec_oprnd1 = vect_get_vec_def_for_operand (op1, stmt,
NULL);
vec_oprnd1 = vect_get_vec_def_for_operand (op1, stmt);
vec_oprnds1.quick_push (vec_oprnd1);
}
}
@ -4133,7 +4070,6 @@ vectorizable_assignment (gimple *stmt, gimple_stmt_iterator *gsi,
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
tree new_temp;
tree def;
gimple *def_stmt;
enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
int ncopies;
@ -4185,8 +4121,7 @@ vectorizable_assignment (gimple *stmt, gimple_stmt_iterator *gsi,
gcc_assert (ncopies >= 1);
if (!vect_is_simple_use_1 (op, stmt, vinfo,
&def_stmt, &def, &dt[0], &vectype_in))
if (!vect_is_simple_use (op, vinfo, &def_stmt, &dt[0], &vectype_in))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -4340,7 +4275,6 @@ vectorizable_shift (gimple *stmt, gimple_stmt_iterator *gsi,
optab optab;
int icode;
machine_mode optab_op2_mode;
tree def;
gimple *def_stmt;
enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
gimple *new_stmt = NULL;
@ -4391,8 +4325,7 @@ vectorizable_shift (gimple *stmt, gimple_stmt_iterator *gsi,
}
op0 = gimple_assign_rhs1 (stmt);
if (!vect_is_simple_use_1 (op0, stmt, vinfo,
&def_stmt, &def, &dt[0], &vectype))
if (!vect_is_simple_use (op0, vinfo, &def_stmt, &dt[0], &vectype))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -4419,8 +4352,7 @@ vectorizable_shift (gimple *stmt, gimple_stmt_iterator *gsi,
return false;
op1 = gimple_assign_rhs2 (stmt);
if (!vect_is_simple_use_1 (op1, stmt, vinfo, &def_stmt,
&def, &dt[1], &op1_vectype))
if (!vect_is_simple_use (op1, vinfo, &def_stmt, &dt[1], &op1_vectype))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -4705,7 +4637,6 @@ vectorizable_operation (gimple *stmt, gimple_stmt_iterator *gsi,
int op_type;
optab optab;
bool target_support_p;
tree def;
gimple *def_stmt;
enum vect_def_type dt[3]
= {vect_unknown_def_type, vect_unknown_def_type, vect_unknown_def_type};
@ -4774,8 +4705,7 @@ vectorizable_operation (gimple *stmt, gimple_stmt_iterator *gsi,
}
op0 = gimple_assign_rhs1 (stmt);
if (!vect_is_simple_use_1 (op0, stmt, vinfo,
&def_stmt, &def, &dt[0], &vectype))
if (!vect_is_simple_use (op0, vinfo, &def_stmt, &dt[0], &vectype))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -4810,8 +4740,7 @@ vectorizable_operation (gimple *stmt, gimple_stmt_iterator *gsi,
if (op_type == binary_op || op_type == ternary_op)
{
op1 = gimple_assign_rhs2 (stmt);
if (!vect_is_simple_use (op1, stmt, vinfo, &def_stmt,
&def, &dt[1]))
if (!vect_is_simple_use (op1, vinfo, &def_stmt, &dt[1]))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -4822,8 +4751,7 @@ vectorizable_operation (gimple *stmt, gimple_stmt_iterator *gsi,
if (op_type == ternary_op)
{
op2 = gimple_assign_rhs3 (stmt);
if (!vect_is_simple_use (op2, stmt, vinfo, &def_stmt,
&def, &dt[2]))
if (!vect_is_simple_use (op2, vinfo, &def_stmt, &dt[2]))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -4984,8 +4912,7 @@ vectorizable_operation (gimple *stmt, gimple_stmt_iterator *gsi,
{
vec_oprnds2.create (1);
vec_oprnds2.quick_push (vect_get_vec_def_for_operand (op2,
stmt,
NULL));
stmt));
}
}
else
@ -5102,7 +5029,6 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
machine_mode vec_mode;
tree dummy;
enum dr_alignment_support alignment_support_scheme;
tree def;
gimple *def_stmt;
enum vect_def_type dt;
stmt_vec_info prev_stmt_info = NULL;
@ -5186,8 +5112,7 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
}
op = gimple_assign_rhs1 (stmt);
if (!vect_is_simple_use (op, stmt, vinfo, &def_stmt,
&def, &dt))
if (!vect_is_simple_use (op, vinfo, &def_stmt, &dt))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -5277,8 +5202,7 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
{
gcc_assert (gimple_assign_single_p (next_stmt));
op = gimple_assign_rhs1 (next_stmt);
if (!vect_is_simple_use (op, next_stmt, vinfo,
&def_stmt, &def, &dt))
if (!vect_is_simple_use (op, vinfo, &def_stmt, &dt))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -5293,13 +5217,11 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
{
gimple *def_stmt;
tree def;
scatter_decl = vect_check_gather_scatter (stmt, loop_vinfo, &scatter_base,
&scatter_off, &scatter_scale);
gcc_assert (scatter_decl);
if (!vect_is_simple_use_1 (scatter_off, NULL, vinfo,
&def_stmt, &def, &scatter_idx_dt,
&scatter_off_vectype))
if (!vect_is_simple_use (scatter_off, vinfo, &def_stmt, &scatter_idx_dt,
&scatter_off_vectype))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -5393,9 +5315,9 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
if (j == 0)
{
src = vec_oprnd1
= vect_get_vec_def_for_operand (gimple_assign_rhs1 (stmt), stmt, NULL);
= vect_get_vec_def_for_operand (gimple_assign_rhs1 (stmt), stmt);
op = vec_oprnd0
= vect_get_vec_def_for_operand (scatter_off, stmt, NULL);
= vect_get_vec_def_for_operand (scatter_off, stmt);
}
else if (modifier != NONE && (j & 1))
{
@ -5613,8 +5535,7 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
{
gcc_assert (gimple_assign_single_p (next_stmt));
op = gimple_assign_rhs1 (next_stmt);
vec_oprnd = vect_get_vec_def_for_operand (op, next_stmt,
NULL);
vec_oprnd = vect_get_vec_def_for_operand (op, next_stmt);
}
}
else
@ -5623,8 +5544,7 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
vec_oprnd = vec_oprnds[j];
else
{
vect_is_simple_use (vec_oprnd, NULL, vinfo,
&def_stmt, &def, &dt);
vect_is_simple_use (vec_oprnd, vinfo, &def_stmt, &dt);
vec_oprnd = vect_get_vec_def_for_stmt_copy (dt, vec_oprnd);
}
}
@ -5767,8 +5687,7 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
&& gimple_assign_single_p (next_stmt));
op = gimple_assign_rhs1 (next_stmt);
vec_oprnd = vect_get_vec_def_for_operand (op, next_stmt,
NULL);
vec_oprnd = vect_get_vec_def_for_operand (op, next_stmt);
dr_chain.quick_push (vec_oprnd);
oprnds.quick_push (vec_oprnd);
next_stmt = GROUP_NEXT_ELEMENT (vinfo_for_stmt (next_stmt));
@ -5813,8 +5732,7 @@ vectorizable_store (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
for (i = 0; i < group_size; i++)
{
op = oprnds[i];
vect_is_simple_use (op, NULL, vinfo, &def_stmt,
&def, &dt);
vect_is_simple_use (op, vinfo, &def_stmt, &dt);
vec_oprnd = vect_get_vec_def_for_stmt_copy (dt, op);
dr_chain[i] = vec_oprnd;
oprnds[i] = vec_oprnd;
@ -6288,13 +6206,11 @@ vectorizable_load (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
if (STMT_VINFO_GATHER_SCATTER_P (stmt_info))
{
gimple *def_stmt;
tree def;
gather_decl = vect_check_gather_scatter (stmt, loop_vinfo, &gather_base,
&gather_off, &gather_scale);
gcc_assert (gather_decl);
if (!vect_is_simple_use_1 (gather_off, NULL, vinfo,
&def_stmt, &def, &gather_dt,
&gather_off_vectype))
if (!vect_is_simple_use (gather_off, vinfo, &def_stmt, &gather_dt,
&gather_off_vectype))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -6483,7 +6399,7 @@ vectorizable_load (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
perm_mask, stmt, gsi);
else if (j == 0)
op = vec_oprnd0
= vect_get_vec_def_for_operand (gather_off, stmt, NULL);
= vect_get_vec_def_for_operand (gather_off, stmt);
else
op = vec_oprnd0
= vect_get_vec_def_for_stmt_copy (gather_dt, vec_oprnd0);
@ -7224,11 +7140,9 @@ vectorizable_load (gimple *stmt, gimple_stmt_iterator *gsi, gimple **vec_stmt,
condition operands are supportable using vec_is_simple_use. */
static bool
vect_is_simple_cond (tree cond, gimple *stmt, vec_info *vinfo,
tree *comp_vectype)
vect_is_simple_cond (tree cond, vec_info *vinfo, tree *comp_vectype)
{
tree lhs, rhs;
tree def;
enum vect_def_type dt;
tree vectype1 = NULL_TREE, vectype2 = NULL_TREE;
@ -7241,8 +7155,7 @@ vect_is_simple_cond (tree cond, gimple *stmt, vec_info *vinfo,
if (TREE_CODE (lhs) == SSA_NAME)
{
gimple *lhs_def_stmt = SSA_NAME_DEF_STMT (lhs);
if (!vect_is_simple_use_1 (lhs, stmt, vinfo,
&lhs_def_stmt, &def, &dt, &vectype1))
if (!vect_is_simple_use (lhs, vinfo, &lhs_def_stmt, &dt, &vectype1))
return false;
}
else if (TREE_CODE (lhs) != INTEGER_CST && TREE_CODE (lhs) != REAL_CST
@ -7252,8 +7165,7 @@ vect_is_simple_cond (tree cond, gimple *stmt, vec_info *vinfo,
if (TREE_CODE (rhs) == SSA_NAME)
{
gimple *rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
if (!vect_is_simple_use_1 (rhs, stmt, vinfo,
&rhs_def_stmt, &def, &dt, &vectype2))
if (!vect_is_simple_use (rhs, vinfo, &rhs_def_stmt, &dt, &vectype2))
return false;
}
else if (TREE_CODE (rhs) != INTEGER_CST && TREE_CODE (rhs) != REAL_CST
@ -7292,7 +7204,6 @@ vectorizable_condition (gimple *stmt, gimple_stmt_iterator *gsi,
tree vec_compare, vec_cond_expr;
tree new_temp;
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
tree def;
enum vect_def_type dt, dts[4];
int ncopies;
enum tree_code code;
@ -7350,32 +7261,14 @@ vectorizable_condition (gimple *stmt, gimple_stmt_iterator *gsi,
then_clause = gimple_assign_rhs2 (stmt);
else_clause = gimple_assign_rhs3 (stmt);
if (!vect_is_simple_cond (cond_expr, stmt, stmt_info->vinfo, &comp_vectype)
if (!vect_is_simple_cond (cond_expr, stmt_info->vinfo, &comp_vectype)
|| !comp_vectype)
return false;
if (TREE_CODE (then_clause) == SSA_NAME)
{
gimple *then_def_stmt = SSA_NAME_DEF_STMT (then_clause);
if (!vect_is_simple_use (then_clause, stmt, stmt_info->vinfo,
&then_def_stmt, &def, &dt))
return false;
}
else if (TREE_CODE (then_clause) != INTEGER_CST
&& TREE_CODE (then_clause) != REAL_CST
&& TREE_CODE (then_clause) != FIXED_CST)
gimple *def_stmt;
if (!vect_is_simple_use (then_clause, stmt_info->vinfo, &def_stmt, &dt))
return false;
if (TREE_CODE (else_clause) == SSA_NAME)
{
gimple *else_def_stmt = SSA_NAME_DEF_STMT (else_clause);
if (!vect_is_simple_use (else_clause, stmt, stmt_info->vinfo,
&else_def_stmt, &def, &dt))
return false;
}
else if (TREE_CODE (else_clause) != INTEGER_CST
&& TREE_CODE (else_clause) != REAL_CST
&& TREE_CODE (else_clause) != FIXED_CST)
if (!vect_is_simple_use (else_clause, stmt_info->vinfo, &def_stmt, &dt))
return false;
unsigned int prec = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (vectype)));
@ -7433,33 +7326,31 @@ vectorizable_condition (gimple *stmt, gimple_stmt_iterator *gsi,
{
gimple *gtemp;
vec_cond_lhs =
vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 0),
stmt, NULL);
vect_is_simple_use (TREE_OPERAND (cond_expr, 0), stmt,
loop_vinfo, &gtemp, &def, &dts[0]);
vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 0), stmt);
vect_is_simple_use (TREE_OPERAND (cond_expr, 0),
loop_vinfo, &gtemp, &dts[0]);
vec_cond_rhs =
vect_get_vec_def_for_operand (TREE_OPERAND (cond_expr, 1),
stmt, NULL);
vect_is_simple_use (TREE_OPERAND (cond_expr, 1), stmt,
loop_vinfo, &gtemp, &def, &dts[1]);
stmt);
vect_is_simple_use (TREE_OPERAND (cond_expr, 1),
loop_vinfo, &gtemp, &dts[1]);
if (reduc_index == 1)
vec_then_clause = reduc_def;
else
{
vec_then_clause = vect_get_vec_def_for_operand (then_clause,
stmt, NULL);
vect_is_simple_use (then_clause, stmt, loop_vinfo,
&gtemp, &def, &dts[2]);
stmt);
vect_is_simple_use (then_clause, loop_vinfo,
&gtemp, &dts[2]);
}
if (reduc_index == 2)
vec_else_clause = reduc_def;
else
{
vec_else_clause = vect_get_vec_def_for_operand (else_clause,
stmt, NULL);
vect_is_simple_use (else_clause, stmt, loop_vinfo,
&gtemp, &def, &dts[3]);
stmt);
vect_is_simple_use (else_clause, loop_vinfo, &gtemp, &dts[3]);
}
}
}
@ -8200,10 +8091,11 @@ get_same_sized_vectype (tree scalar_type, tree vector_type)
/* Function vect_is_simple_use.
Input:
LOOP_VINFO - the vect info of the loop that is being vectorized.
BB_VINFO - the vect info of the basic block that is being vectorized.
OPERAND - operand of STMT in the loop or bb.
DEF - the defining stmt in case OPERAND is an SSA_NAME.
VINFO - the vect info of the loop or basic block that is being vectorized.
OPERAND - operand in the loop or bb.
Output:
DEF_STMT - the defining stmt in case OPERAND is an SSA_NAME.
DT - the type of definition
Returns whether a stmt with OPERAND can be vectorized.
For loops, supportable operands are constants, loop invariants, and operands
@ -8214,11 +8106,10 @@ get_same_sized_vectype (tree scalar_type, tree vector_type)
For now, operands defined outside the basic block are not supported. */
bool
vect_is_simple_use (tree operand, gimple *stmt, vec_info *vinfo,
gimple **def_stmt, tree *def, enum vect_def_type *dt)
vect_is_simple_use (tree operand, vec_info *vinfo,
gimple **def_stmt, enum vect_def_type *dt)
{
*def_stmt = NULL;
*def = NULL_TREE;
*dt = vect_unknown_def_type;
if (dump_enabled_p ())
@ -8237,7 +8128,6 @@ vect_is_simple_use (tree operand, gimple *stmt, vec_info *vinfo,
if (is_gimple_min_invariant (operand))
{
*def = operand;
*dt = vect_external_def;
return true;
}
@ -8252,7 +8142,6 @@ vect_is_simple_use (tree operand, gimple *stmt, vec_info *vinfo,
if (SSA_NAME_IS_DEFAULT_DEF (operand))
{
*def = operand;
*dt = vect_external_def;
return true;
}
@ -8315,10 +8204,7 @@ vect_is_simple_use (tree operand, gimple *stmt, vec_info *vinfo,
}
}
if (*dt == vect_unknown_def_type
|| (stmt
&& *dt == vect_double_reduction_def
&& gimple_code (stmt) != GIMPLE_PHI))
if (*dt == vect_unknown_def_type)
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -8329,18 +8215,9 @@ vect_is_simple_use (tree operand, gimple *stmt, vec_info *vinfo,
switch (gimple_code (*def_stmt))
{
case GIMPLE_PHI:
*def = gimple_phi_result (*def_stmt);
break;
case GIMPLE_ASSIGN:
*def = gimple_assign_lhs (*def_stmt);
break;
case GIMPLE_CALL:
*def = gimple_call_lhs (*def_stmt);
if (*def != NULL)
break;
/* FALLTHRU */
break;
default:
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@ -8351,9 +8228,9 @@ vect_is_simple_use (tree operand, gimple *stmt, vec_info *vinfo,
return true;
}
/* Function vect_is_simple_use_1.
/* Function vect_is_simple_use.
Same as vect_is_simple_use_1 but also determines the vector operand
Same as vect_is_simple_use but also determines the vector operand
type of OPERAND and stores it to *VECTYPE. If the definition of
OPERAND is vect_uninitialized_def, vect_constant_def or
vect_external_def *VECTYPE will be set to NULL_TREE and the caller
@ -8361,11 +8238,10 @@ vect_is_simple_use (tree operand, gimple *stmt, vec_info *vinfo,
scalar operand. */
bool
vect_is_simple_use_1 (tree operand, gimple *stmt, vec_info *vinfo,
gimple **def_stmt,
tree *def, enum vect_def_type *dt, tree *vectype)
vect_is_simple_use (tree operand, vec_info *vinfo,
gimple **def_stmt, enum vect_def_type *dt, tree *vectype)
{
if (!vect_is_simple_use (operand, stmt, vinfo, def_stmt, def, dt))
if (!vect_is_simple_use (operand, vinfo, def_stmt, dt))
return false;
/* Now get a vector type if the def is internal, otherwise supply
@ -8506,6 +8382,16 @@ supportable_widening_operation (enum tree_code code, gimple *stmt,
c2 = VEC_WIDEN_MULT_HI_EXPR;
break;
case DOT_PROD_EXPR:
c1 = DOT_PROD_EXPR;
c2 = DOT_PROD_EXPR;
break;
case SAD_EXPR:
c1 = SAD_EXPR;
c2 = SAD_EXPR;
break;
case VEC_WIDEN_MULT_EVEN_EXPR:
/* Support the recursion induced just above. */
c1 = VEC_WIDEN_MULT_EVEN_EXPR;
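Call sites throughout tree-vect-stmts.c change in the same mechanical way; a representative before/after, modeled on the vectorizable_operation hunk above (the diagnostic dump_printf calls are omitted here for brevity):

/* Old:
     vect_is_simple_use_1 (op0, stmt, vinfo, &def_stmt, &def, &dt[0], &vectype)
     vect_is_simple_use (op1, stmt, vinfo, &def_stmt, &def, &dt[1])
   New: the use stmt and scalar def are no longer passed, and the
   vectype-returning variant is just another overload.  */
if (!vect_is_simple_use (op0, vinfo, &def_stmt, &dt[0], &vectype))
  return false;
if (!vect_is_simple_use (op1, vinfo, &def_stmt, &dt[1]))
  return false;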

gcc/tree-vectorizer.h

@ -932,10 +932,10 @@ extern bool vect_can_advance_ivs_p (loop_vec_info);
extern unsigned int current_vector_size;
extern tree get_vectype_for_scalar_type (tree);
extern tree get_same_sized_vectype (tree, tree);
extern bool vect_is_simple_use (tree, gimple *, vec_info *, gimple **,
tree *, enum vect_def_type *);
extern bool vect_is_simple_use_1 (tree, gimple *, vec_info *, gimple **,
tree *, enum vect_def_type *, tree *);
extern bool vect_is_simple_use (tree, vec_info *, gimple **,
enum vect_def_type *);
extern bool vect_is_simple_use (tree, vec_info *, gimple **,
enum vect_def_type *, tree *);
extern bool supportable_widening_operation (enum tree_code, gimple *, tree,
tree, enum tree_code *,
enum tree_code *, int *,
@ -962,7 +962,7 @@ extern unsigned record_stmt_cost (stmt_vector_for_cost *, int,
extern void vect_finish_stmt_generation (gimple *, gimple *,
gimple_stmt_iterator *);
extern bool vect_mark_stmts_to_be_vectorized (loop_vec_info);
extern tree vect_get_vec_def_for_operand (tree, gimple *, tree *);
extern tree vect_get_vec_def_for_operand (tree, gimple *);
extern tree vect_init_vector (gimple *, tree, tree,
gimple_stmt_iterator *);
extern tree vect_get_vec_def_for_stmt_copy (enum vect_def_type, tree);