Remove parameter keep_aligning from get_inner_reference.
2013-11-26 Bernd Edlinger <bernd.edlinger@hotmail.de> Remove parameter keep_aligning from get_inner_reference. * tree.h (get_inner_reference): Adjust header. * expr.c (get_inner_reference): Remove parameter keep_aligning. (get_bit_range, expand_assignment, expand_expr_addr_expr_1, expand_expr_real_1): Adjust. * asan.c (instrument_derefs): Adjust. * builtins.c (get_object_alignment_2): Adjust. Remove handling of VIEW_CONVERT_EXPR. * cfgexpand.c (expand_debug_expr): Adjust. * dbxout.c (dbxout_expand_expr): Adjust. * dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref, loc_list_from_tree, fortran_common): Adjust. * fold-const.c (optimize_bit_field_compare, decode_field_reference, fold_unary_loc, fold_comparison, split_address_to_core_and_offset): Adjust. * gimple-ssa-strength-reduction.c (slsr_process_ref): Adjust. * simplify-rtx.c (delegitimize_mem_from_attrs): Adjust. * tree-affine.c (tree_to_aff_combination, get_inner_reference_aff): Adjust. * tree-data-ref.c (split_constant_offset_1, dr_analyze_innermost): Adjust. * tree-vect-data-refs.c (vect_check_gather, vect_analyze_data_refs): Adjust. * tree-scalar-evolution.c (interpret_rhs_expr): Adjust. * tree-ssa-loop-ivopts.c (may_be_unaligned_p, split_address_cost): Adjust. * tsan.c (instrument_expr): Adjust. * ada/gcc-interface/decl.c (elaborate_expression_1): Adjust. * ada/gcc-interface/trans.c (Attribute_to_gnu): Adjust. * ada/gcc-interface/utils2.c (build_unary_op): Adjust. * config/mips/mips.c (r10k_safe_mem_expr_p): Adjust. From-SVN: r205398
This commit is contained in:
parent
97c34bdbe2
commit
08af82b017
@ -1,3 +1,37 @@
|
||||
2013-11-26 Bernd Edlinger <bernd.edlinger@hotmail.de>
|
||||
|
||||
Remove parameter keep_aligning from get_inner_reference.
|
||||
* tree.h (get_inner_reference): Adjust header.
|
||||
* expr.c (get_inner_reference): Remove parameter keep_aligning.
|
||||
(get_bit_range, expand_assignment,
|
||||
expand_expr_addr_expr_1, expand_expr_real_1): Adjust.
|
||||
* asan.c (instrument_derefs): Adjust.
|
||||
* builtins.c (get_object_alignment_2): Adjust. Remove handling of
|
||||
VIEW_CONVERT_EXPR.
|
||||
* cfgexpand.c (expand_debug_expr): Adjust.
|
||||
* dbxout.c (dbxout_expand_expr): Adjust.
|
||||
* dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref,
|
||||
loc_list_from_tree, fortran_common): Adjust.
|
||||
* fold-const.c (optimize_bit_field_compare,
|
||||
decode_field_reference, fold_unary_loc, fold_comparison,
|
||||
split_address_to_core_and_offset): Adjust.
|
||||
* gimple-ssa-strength-reduction.c (slsr_process_ref): Adjust.
|
||||
* simplify-rtx.c (delegitimize_mem_from_attrs): Adjust.
|
||||
* tree-affine.c (tree_to_aff_combination,
|
||||
get_inner_reference_aff): Adjust.
|
||||
* tree-data-ref.c (split_constant_offset_1,
|
||||
dr_analyze_innermost): Adjust.
|
||||
* tree-vect-data-refs.c (vect_check_gather,
|
||||
vect_analyze_data_refs): Adjust.
|
||||
* tree-scalar-evolution.c (interpret_rhs_expr): Adjust.
|
||||
* tree-ssa-loop-ivopts.c (may_be_unaligned_p,
|
||||
split_address_cost): Adjust.
|
||||
* tsan.c (instrument_expr): Adjust.
|
||||
* ada/gcc-interface/decl.c (elaborate_expression_1): Adjust.
|
||||
* ada/gcc-interface/trans.c (Attribute_to_gnu): Adjust.
|
||||
* ada/gcc-interface/utils2.c (build_unary_op): Adjust.
|
||||
* config/mips/mips.c (r10k_safe_mem_expr_p): Adjust.
|
||||
|
||||
2013-11-26 Yufeng Zhang <yufeng.zhang@arm.com>
|
||||
|
||||
* config/arm/arm.c (arm_legitimize_address): Check xop1 is not
|
||||
|
@ -6269,7 +6269,7 @@ elaborate_expression_1 (tree gnu_expr, Entity_Id gnat_entity, tree gnu_name,
|
||||
int unsignedp, volatilep;
|
||||
|
||||
inner = get_inner_reference (inner, &bitsize, &bitpos, &offset,
|
||||
&mode, &unsignedp, &volatilep, false);
|
||||
&mode, &unsignedp, &volatilep);
|
||||
/* If the offset is variable, err on the side of caution. */
|
||||
if (offset)
|
||||
inner = NULL_TREE;
|
||||
|
@ -2060,7 +2060,7 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
|
||||
&& TREE_CODE (gnu_prefix) == FIELD_DECL));
|
||||
|
||||
get_inner_reference (gnu_prefix, &bitsize, &bitpos, &gnu_offset,
|
||||
&mode, &unsignedp, &volatilep, false);
|
||||
&mode, &unsignedp, &volatilep);
|
||||
|
||||
if (TREE_CODE (gnu_prefix) == COMPONENT_REF)
|
||||
{
|
||||
|
@ -1312,8 +1312,7 @@ build_unary_op (enum tree_code op_code, tree result_type, tree operand)
|
||||
int unsignedp, volatilep;
|
||||
|
||||
inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
|
||||
&mode, &unsignedp, &volatilep,
|
||||
false);
|
||||
&mode, &unsignedp, &volatilep);
|
||||
|
||||
/* If INNER is a padding type whose field has a self-referential
|
||||
size, convert to that inner type. We know the offset is zero
|
||||
|
@ -1488,7 +1488,7 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t,
|
||||
enum machine_mode mode;
|
||||
int volatilep = 0, unsignedp = 0;
|
||||
tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
|
||||
&mode, &unsignedp, &volatilep, false);
|
||||
&mode, &unsignedp, &volatilep);
|
||||
if (bitpos % (size_in_bytes * BITS_PER_UNIT)
|
||||
|| bitsize != size_in_bytes * BITS_PER_UNIT)
|
||||
{
|
||||
|
@ -329,7 +329,7 @@ get_object_alignment_2 (tree exp, unsigned int *alignp,
|
||||
/* Get the innermost object and the constant (bitpos) and possibly
|
||||
variable (offset) offset of the access. */
|
||||
exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
|
||||
&mode, &unsignedp, &volatilep, true);
|
||||
&mode, &unsignedp, &volatilep);
|
||||
|
||||
/* Extract alignment information from the innermost object and
|
||||
possibly adjust bitpos and offset. */
|
||||
@ -360,10 +360,6 @@ get_object_alignment_2 (tree exp, unsigned int *alignp,
|
||||
align = DECL_ALIGN (exp);
|
||||
known_alignment = true;
|
||||
}
|
||||
else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
|
||||
{
|
||||
align = TYPE_ALIGN (TREE_TYPE (exp));
|
||||
}
|
||||
else if (TREE_CODE (exp) == INDIRECT_REF
|
||||
|| TREE_CODE (exp) == MEM_REF
|
||||
|| TREE_CODE (exp) == TARGET_MEM_REF)
|
||||
|
@ -3941,7 +3941,7 @@ expand_debug_expr (tree exp)
|
||||
tree offset;
|
||||
int volatilep = 0;
|
||||
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
|
||||
&mode1, &unsignedp, &volatilep, false);
|
||||
&mode1, &unsignedp, &volatilep);
|
||||
rtx orig_op0;
|
||||
|
||||
if (bitsize == 0)
|
||||
|
@ -14948,7 +14948,7 @@ r10k_safe_mem_expr_p (tree expr, unsigned HOST_WIDE_INT offset)
|
||||
int unsigned_p, volatile_p;
|
||||
|
||||
inner = get_inner_reference (expr, &bitsize, &bitoffset, &var_offset, &mode,
|
||||
&unsigned_p, &volatile_p, false);
|
||||
&unsigned_p, &volatile_p);
|
||||
if (!DECL_P (inner) || !DECL_SIZE_UNIT (inner) || var_offset)
|
||||
return false;
|
||||
|
||||
|
@ -2515,7 +2515,7 @@ dbxout_expand_expr (tree expr)
|
||||
rtx x;
|
||||
|
||||
tem = get_inner_reference (expr, &bitsize, &bitpos, &offset,
|
||||
&mode, &unsignedp, &volatilep, true);
|
||||
&mode, &unsignedp, &volatilep);
|
||||
|
||||
x = dbxout_expand_expr (tem);
|
||||
if (x == NULL || !MEM_P (x))
|
||||
|
@ -13934,7 +13934,7 @@ loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev)
|
||||
|
||||
obj = get_inner_reference (TREE_OPERAND (loc, 0),
|
||||
&bitsize, &bitpos, &offset, &mode,
|
||||
&unsignedp, &volatilep, false);
|
||||
&unsignedp, &volatilep);
|
||||
STRIP_NOPS (obj);
|
||||
if (bitpos % BITS_PER_UNIT)
|
||||
{
|
||||
@ -14211,7 +14211,7 @@ loc_list_from_tree (tree loc, int want_address)
|
||||
int unsignedp, volatilep = 0;
|
||||
|
||||
obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
|
||||
&unsignedp, &volatilep, false);
|
||||
&unsignedp, &volatilep);
|
||||
|
||||
gcc_assert (obj != loc);
|
||||
|
||||
@ -15521,7 +15521,7 @@ fortran_common (tree decl, HOST_WIDE_INT *value)
|
||||
return NULL_TREE;
|
||||
|
||||
cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset,
|
||||
&mode, &unsignedp, &volatilep, true);
|
||||
&mode, &unsignedp, &volatilep);
|
||||
|
||||
if (cvar == NULL_TREE
|
||||
|| TREE_CODE (cvar) != VAR_DECL
|
||||
|
37
gcc/expr.c
37
gcc/expr.c
@ -4648,7 +4648,7 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
|
||||
int unsignedp;
|
||||
int volatilep = 0;
|
||||
get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
|
||||
&roffset, &rmode, &unsignedp, &volatilep, false);
|
||||
&roffset, &rmode, &unsignedp, &volatilep);
|
||||
if ((rbitpos % BITS_PER_UNIT) != 0)
|
||||
{
|
||||
*bitstart = *bitend = 0;
|
||||
@ -4802,7 +4802,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
|
||||
|
||||
push_temp_slots ();
|
||||
tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
|
||||
&unsignedp, &volatilep, true);
|
||||
&unsignedp, &volatilep);
|
||||
|
||||
/* Make sure bitpos is not negative, it can wreak havoc later. */
|
||||
if (bitpos < 0)
|
||||
@ -6644,27 +6644,13 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
|
||||
|
||||
If the field describes a variable-sized object, *PMODE is set to
|
||||
BLKmode and *PBITSIZE is set to -1. An access cannot be made in
|
||||
this case, but the address of the object can be found.
|
||||
|
||||
If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
|
||||
look through nodes that serve as markers of a greater alignment than
|
||||
the one that can be deduced from the expression. These nodes make it
|
||||
possible for front-ends to prevent temporaries from being created by
|
||||
the middle-end on alignment considerations. For that purpose, the
|
||||
normal operating mode at high-level is to always pass FALSE so that
|
||||
the ultimate containing object is really returned; moreover, the
|
||||
associated predicate handled_component_p will always return TRUE
|
||||
on these nodes, thus indicating that they are essentially handled
|
||||
by get_inner_reference. TRUE should only be passed when the caller
|
||||
is scanning the expression in order to build another representation
|
||||
and specifically knows how to handle these nodes; as such, this is
|
||||
the normal operating mode in the RTL expanders. */
|
||||
this case, but the address of the object can be found. */
|
||||
|
||||
tree
|
||||
get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
|
||||
HOST_WIDE_INT *pbitpos, tree *poffset,
|
||||
enum machine_mode *pmode, int *punsignedp,
|
||||
int *pvolatilep, bool keep_aligning)
|
||||
int *pvolatilep)
|
||||
{
|
||||
tree size_tree = 0;
|
||||
enum machine_mode mode = VOIDmode;
|
||||
@ -6784,14 +6770,6 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
|
||||
break;
|
||||
|
||||
case VIEW_CONVERT_EXPR:
|
||||
if (keep_aligning && STRICT_ALIGNMENT
|
||||
&& (TYPE_ALIGN (TREE_TYPE (exp))
|
||||
> TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
|
||||
&& (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
|
||||
< BIGGEST_ALIGNMENT)
|
||||
&& (TYPE_ALIGN_OK (TREE_TYPE (exp))
|
||||
|| TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
|
||||
goto done;
|
||||
break;
|
||||
|
||||
case MEM_REF:
|
||||
@ -7656,7 +7634,7 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
|
||||
they won't change the final object whose address will be returned
|
||||
(they actually exist only for that purpose). */
|
||||
inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
|
||||
&mode1, &unsignedp, &volatilep, false);
|
||||
&mode1, &unsignedp, &volatilep);
|
||||
break;
|
||||
}
|
||||
|
||||
@ -9933,7 +9911,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|
||||
tree offset;
|
||||
int volatilep = 0, must_force_mem;
|
||||
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
|
||||
&mode1, &unsignedp, &volatilep, true);
|
||||
&mode1, &unsignedp, &volatilep);
|
||||
rtx orig_op0, memloc;
|
||||
bool mem_attrs_from_type = false;
|
||||
|
||||
@ -10294,8 +10272,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|
||||
int volatilep = 0;
|
||||
tree tem
|
||||
= get_inner_reference (treeop0, &bitsize, &bitpos,
|
||||
&offset, &mode1, &unsignedp, &volatilep,
|
||||
true);
|
||||
&offset, &mode1, &unsignedp, &volatilep);
|
||||
rtx orig_op0;
|
||||
|
||||
/* ??? We should work harder and deal with non-zero offsets. */
|
||||
|
@ -3488,7 +3488,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
|
||||
do anything if the inner expression is a PLACEHOLDER_EXPR since we
|
||||
then will no longer be able to replace it. */
|
||||
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
|
||||
&lunsignedp, &lvolatilep, false);
|
||||
&lunsignedp, &lvolatilep);
|
||||
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
|
||||
|| offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
|
||||
return 0;
|
||||
@ -3498,7 +3498,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
|
||||
/* If this is not a constant, we can only do something if bit positions,
|
||||
sizes, and signedness are the same. */
|
||||
rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
|
||||
&runsignedp, &rvolatilep, false);
|
||||
&runsignedp, &rvolatilep);
|
||||
|
||||
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|
||||
|| lunsignedp != runsignedp || offset != 0
|
||||
@ -3672,7 +3672,7 @@ decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
|
||||
}
|
||||
|
||||
inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
|
||||
punsignedp, pvolatilep, false);
|
||||
punsignedp, pvolatilep);
|
||||
if ((inner == exp && and_mask == 0)
|
||||
|| *pbitsize < 0 || offset != 0
|
||||
|| TREE_CODE (inner) == PLACEHOLDER_EXPR)
|
||||
@ -8056,7 +8056,7 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
|
||||
int unsignedp, volatilep;
|
||||
tree base = TREE_OPERAND (op0, 0);
|
||||
base = get_inner_reference (base, &bitsize, &bitpos, &offset,
|
||||
&mode, &unsignedp, &volatilep, false);
|
||||
&mode, &unsignedp, &volatilep);
|
||||
/* If the reference was to a (constant) zero offset, we can use
|
||||
the address of the base if it has the same base type
|
||||
as the result type and the pointer type is unqualified. */
|
||||
@ -9081,7 +9081,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
|
||||
{
|
||||
base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
|
||||
&bitsize, &bitpos0, &offset0, &mode,
|
||||
&unsignedp, &volatilep, false);
|
||||
&unsignedp, &volatilep);
|
||||
if (TREE_CODE (base0) == INDIRECT_REF)
|
||||
base0 = TREE_OPERAND (base0, 0);
|
||||
else
|
||||
@ -9115,7 +9115,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
|
||||
{
|
||||
base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
|
||||
&bitsize, &bitpos1, &offset1, &mode,
|
||||
&unsignedp, &volatilep, false);
|
||||
&unsignedp, &volatilep);
|
||||
if (TREE_CODE (base1) == INDIRECT_REF)
|
||||
base1 = TREE_OPERAND (base1, 0);
|
||||
else
|
||||
@ -16982,8 +16982,7 @@ split_address_to_core_and_offset (tree exp,
|
||||
if (TREE_CODE (exp) == ADDR_EXPR)
|
||||
{
|
||||
core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
|
||||
poffset, &mode, &unsignedp, &volatilep,
|
||||
false);
|
||||
poffset, &mode, &unsignedp, &volatilep);
|
||||
core = build_fold_addr_expr_loc (loc, core);
|
||||
}
|
||||
else
|
||||
|
@ -940,7 +940,7 @@ slsr_process_ref (gimple gs)
|
||||
return;
|
||||
|
||||
base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
|
||||
&unsignedp, &volatilep, false);
|
||||
&unsignedp, &volatilep);
|
||||
index = double_int::from_uhwi (bitpos);
|
||||
|
||||
if (!restructure_reference (&base, &offset, &index, &type))
|
||||
|
@ -296,7 +296,7 @@ delegitimize_mem_from_attrs (rtx x)
|
||||
int unsignedp, volatilep = 0;
|
||||
|
||||
decl = get_inner_reference (decl, &bitsize, &bitpos, &toffset,
|
||||
&mode, &unsignedp, &volatilep, false);
|
||||
&mode, &unsignedp, &volatilep);
|
||||
if (bitsize != GET_MODE_BITSIZE (mode)
|
||||
|| (bitpos % BITS_PER_UNIT)
|
||||
|| (toffset && !tree_fits_shwi_p (toffset)))
|
||||
|
@ -325,8 +325,7 @@ tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
|
||||
return;
|
||||
}
|
||||
core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
|
||||
&toffset, &mode, &unsignedp, &volatilep,
|
||||
false);
|
||||
&toffset, &mode, &unsignedp, &volatilep);
|
||||
if (bitpos % BITS_PER_UNIT != 0)
|
||||
break;
|
||||
aff_combination_const (comb, type,
|
||||
@ -895,7 +894,7 @@ get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
|
||||
int uns, vol;
|
||||
aff_tree tmp;
|
||||
tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
|
||||
&uns, &vol, false);
|
||||
&uns, &vol);
|
||||
tree base_addr = build_fold_addr_expr (base);
|
||||
|
||||
/* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT. */
|
||||
|
@ -619,7 +619,7 @@ split_constant_offset_1 (tree type, tree op0, enum tree_code code, tree op1,
|
||||
|
||||
op0 = TREE_OPERAND (op0, 0);
|
||||
base = get_inner_reference (op0, &pbitsize, &pbitpos, &poffset,
|
||||
&pmode, &punsignedp, &pvolatilep, false);
|
||||
&pmode, &punsignedp, &pvolatilep);
|
||||
|
||||
if (pbitpos % BITS_PER_UNIT != 0)
|
||||
return false;
|
||||
@ -769,7 +769,7 @@ dr_analyze_innermost (struct data_reference *dr, struct loop *nest)
|
||||
fprintf (dump_file, "analyze_innermost: ");
|
||||
|
||||
base = get_inner_reference (ref, &pbitsize, &pbitpos, &poffset,
|
||||
&pmode, &punsignedp, &pvolatilep, false);
|
||||
&pmode, &punsignedp, &pvolatilep);
|
||||
gcc_assert (base != NULL_TREE);
|
||||
|
||||
if (pbitpos % BITS_PER_UNIT != 0)
|
||||
|
@ -1658,7 +1658,7 @@ interpret_rhs_expr (struct loop *loop, gimple at_stmt,
|
||||
|
||||
base = get_inner_reference (TREE_OPERAND (rhs1, 0),
|
||||
&bitsize, &bitpos, &offset,
|
||||
&mode, &unsignedp, &volatilep, false);
|
||||
&mode, &unsignedp, &volatilep);
|
||||
|
||||
if (TREE_CODE (base) == MEM_REF)
|
||||
{
|
||||
|
@ -1684,7 +1684,7 @@ may_be_unaligned_p (tree ref, tree step)
|
||||
does to check whether the object must be loaded by parts when
|
||||
STRICT_ALIGNMENT is true. */
|
||||
base = get_inner_reference (ref, &bitsize, &bitpos, &toffset, &mode,
|
||||
&unsignedp, &volatilep, true);
|
||||
&unsignedp, &volatilep);
|
||||
base_type = TREE_TYPE (base);
|
||||
base_align = get_object_alignment (base);
|
||||
base_align = MAX (base_align, TYPE_ALIGN (base_type));
|
||||
@ -3781,7 +3781,7 @@ split_address_cost (struct ivopts_data *data,
|
||||
int unsignedp, volatilep;
|
||||
|
||||
core = get_inner_reference (addr, &bitsize, &bitpos, &toffset, &mode,
|
||||
&unsignedp, &volatilep, false);
|
||||
&unsignedp, &volatilep);
|
||||
|
||||
if (toffset != 0
|
||||
|| bitpos % BITS_PER_UNIT != 0
|
||||
|
@ -2970,7 +2970,7 @@ vect_check_gather (gimple stmt, loop_vec_info loop_vinfo, tree *basep,
|
||||
SSA_NAME OFF and put the loop invariants into a tree BASE
|
||||
that can be gimplified before the loop. */
|
||||
base = get_inner_reference (DR_REF (dr), &pbitsize, &pbitpos, &off,
|
||||
&pmode, &punsignedp, &pvolatilep, false);
|
||||
&pmode, &punsignedp, &pvolatilep);
|
||||
gcc_assert (base != NULL_TREE && (pbitpos % BITS_PER_UNIT) == 0);
|
||||
|
||||
if (TREE_CODE (base) == MEM_REF)
|
||||
@ -3466,7 +3466,7 @@ again:
|
||||
}
|
||||
|
||||
outer_base = get_inner_reference (inner_base, &pbitsize, &pbitpos,
|
||||
&poffset, &pmode, &punsignedp, &pvolatilep, false);
|
||||
&poffset, &pmode, &punsignedp, &pvolatilep);
|
||||
gcc_assert (outer_base != NULL_TREE);
|
||||
|
||||
if (pbitpos % BITS_PER_UNIT != 0)
|
||||
|
@ -4511,8 +4511,7 @@ extern tree build_personality_function (const char *);
|
||||
look for the ultimate containing object, which is returned and specify
|
||||
the access position and size. */
|
||||
extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
|
||||
tree *, enum machine_mode *, int *, int *,
|
||||
bool);
|
||||
tree *, enum machine_mode *, int *, int *);
|
||||
|
||||
/* Return a tree representing the lower bound of the array mentioned in
|
||||
EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
|
||||
|
@ -121,7 +121,7 @@ instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
|
||||
enum machine_mode mode;
|
||||
int volatilep = 0, unsignedp = 0;
|
||||
base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
|
||||
&mode, &unsignedp, &volatilep, false);
|
||||
&mode, &unsignedp, &volatilep);
|
||||
|
||||
/* No need to instrument accesses to decls that don't escape,
|
||||
they can't escape to other threads then. */
|
||||
|
Loading…
x
Reference in New Issue
Block a user