alias.c (alias_sets_conflict_p): New function.

* alias.c (alias_sets_conflict_p): New function.
(mems_in_disjoint_alias_sets_p): Use it.
(readonly_fields_p): Moved from expr.c; check for record type.
(objects_must_conflict_p): New function.
* calls.c (expand_call): Use assign_temp as much as possible, use
readonly variant if assigned once, and don't set memory attributes.
(emit_library_call_value_1, store_one_arg): Likewise.
* integrate.c (expand_inline_function): Likewise.
* stmt.c (expand_asm_operands, expand_return): Likewise.
* expr.c (copy_blkmode_from_reg, store_constructor): Likewise.
(store_field, save_noncopied_parts, expand_expr): Likewise.
(expand_expr_unaligned): Likewise.
(readonly_fields_p): Moved to alias.c.
(safe_from_p): Rework handling of SAVE_EXPR.
MEMs only conflict if alias sets conflict; likewise for INDIRECT_REF.
* function.c (struct temp_slot): Delete field ALIAS_SET; add TYPE.
(assign_stack_temp_for_type): Use objects_must_conflict_p.
Set all memory attributes from type, if specified.
(mark_temp_slot): Mark TYPE field.
* tree.h (alias_sets_conflict_p, readonly_fields_p): New decls.
(objects_must_conflict_p): Likewise.

* stmt.c (expand_decl): Don't use assign_stack_temp in error case.
(add_case_node): No need to copy nodes anymore.

From-SVN: r38559
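The heart of the change is the pair of predicates added to alias.c: alias_sets_conflict_p decides whether two alias sets may overlap, and objects_must_conflict_p builds on it to decide whether two object types are guaranteed to overlap. Below is a minimal standalone sketch of the conflict rules, assuming a made-up aset record and subset_of helper in place of GCC's alias_set_entry splay trees; it is an illustration of the logic, not GCC code.

#include <stdio.h>

/* Illustrative stand-in for an alias set: id 0 means "no information",
   and `parent' models the subset (child) relation that GCC records
   with record_alias_subset.  */
struct aset { int id; const struct aset *parent; };

/* Return 1 if A is SET itself or one of SET's children.  */
static int subset_of (const struct aset *a, const struct aset *set)
{
  for (; a != 0; a = a->parent)
    if (a->id == set->id)
      return 1;
  return 0;
}

/* Mirrors the logic of the new alias_sets_conflict_p: set 0 conflicts
   with everything, equal sets conflict, and so does a subset relation
   in either direction.  */
static int sets_conflict_p (const struct aset *s1, const struct aset *s2)
{
  if (s1->id == 0 || s2->id == 0 || s1->id == s2->id)
    return 1;
  return subset_of (s1, s2) || subset_of (s2, s1);
}

int main (void)
{
  struct aset anything = { 0, 0 };          /* nothing known */
  struct aset s_double = { 1, 0 };
  struct aset s_struct = { 2, 0 };
  struct aset s_field  = { 3, &s_struct };  /* a field of the struct */

  printf ("%d\n", sets_conflict_p (&anything, &s_double)); /* 1 */
  printf ("%d\n", sets_conflict_p (&s_double, &s_struct)); /* 0 */
  printf ("%d\n", sets_conflict_p (&s_field, &s_struct));  /* 1 */
  return 0;
}

Set 0 means "no information" and therefore conflicts with everything, which is why callers such as objects_must_conflict_p pass 0 when no type is known.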
gcc/ChangeLog

@@ -1,9 +1,35 @@
2000-12-30  Richard Kenner  <kenner@vlsi1.ultra.nyu.edu>

        * alias.c (alias_sets_conflict_p): New function.
        (mems_in_disjoint_alias_sets_p): Use it.
        (readonly_fields_p): Moved from expr.c; check for record type.
        (objects_must_conflict_p): New function.
        * calls.c (expand_call): Use assign_temp as much as possible, use
        readonly variant if assigned once, and don't set memory attributes.
        (emit_library_call_value_1, store_one_arg): Likewise.
        * integrate.c (expand_inline_function): Likewise.
        * stmt.c (expand_asm_operands, expand_return): Likewise.
        * expr.c (copy_blkmode_from_reg, store_constructor): Likewise.
        (store_field, save_noncopied_parts, expand_expr): Likewise.
        (expand_expr_unaligned): Likewise.
        (readonly_fields_p): Moved to alias.c.
        (safe_from_p): Rework handling of SAVE_EXPR.
        MEMs only conflict if alias sets conflict; likewise for INDIRECT_REF.
        * function.c (struct temp_slot): Delete field ALIAS_SET; add TYPE.
        (assign_stack_temp_for_type): Use objects_must_conflict_p.
        Set all memory attributes from type, if specified.
        (mark_temp_slot): Mark TYPE field.
        * tree.h (alias_sets_conflict_p, readonly_fields_p): New decls.
        (objects_must_conflict_p): Likewise.

        * stmt.c (expand_decl): Don't use assign_stack_temp in error case.
        (add_case_node): No need to copy nodes anymore.

2000-12-30  Alexandre Oliva  <aoliva@redhat.com>

        * config/sh/sh.c (split_branches): Don't dereference re-computed
        `beyond' before checking it's non-NULL.

2000-12-29  Robert Lipe  <robertl@sco.com>

        Remove COFF support from i?86-pc-sco3.2v5.
gcc/alias.c  (121 changed lines)

@@ -211,8 +211,6 @@ mems_in_disjoint_alias_sets_p (mem1, mem2)
     rtx mem1;
     rtx mem2;
{
  alias_set_entry ase;

#ifdef ENABLE_CHECKING
  /* Perform a basic sanity check.  Namely, that there are no alias sets
     if we're not using strict aliasing.  This helps to catch bugs

@@ -226,34 +224,7 @@ mems_in_disjoint_alias_sets_p (mem1, mem2)
    abort ();
#endif

  /* If have no alias set information for one of the MEMs, we have to assume
     it can alias anything.  */
  if (MEM_ALIAS_SET (mem1) == 0 || MEM_ALIAS_SET (mem2) == 0)
    return 0;

  /* If the two alias sets are the same, they may alias.  */
  if (MEM_ALIAS_SET (mem1) == MEM_ALIAS_SET (mem2))
    return 0;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (MEM_ALIAS_SET (mem1));
  if (ase != 0
      && (ase->has_zero_child
          || splay_tree_lookup (ase->children,
                                (splay_tree_key) MEM_ALIAS_SET (mem2))))
    return 0;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (MEM_ALIAS_SET (mem2));
  if (ase != 0
      && (ase->has_zero_child
          || splay_tree_lookup (ase->children,
                                (splay_tree_key) MEM_ALIAS_SET (mem1))))
    return 0;

  /* The two MEMs are in distinct alias sets, and neither one is the
     child of the other.  Therefore, they cannot alias.  */
  return 1;
  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}

/* Insert the NODE into the splay tree given by DATA.  Used by

@@ -268,6 +239,96 @@ insert_subset_children (node, data)

  return 0;
}

/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (set1, set2)
     HOST_WIDE_INT set1, set2;
{
  alias_set_entry ase;

  /* If have no alias set information for one of the operands, we have
     to assume it can alias anything.  */
  if (set1 == 0 || set2 == 0
      /* If the two alias sets are the same, they may alias.  */
      || set1 == set2)
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
          || splay_tree_lookup (ase->children,
                                (splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
          || splay_tree_lookup (ase->children,
                                (splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot alias.  */
  return 0;
}

/* Return 1 if TYPE is a RECORD_TYPE, UNION_TYPE, or QUAL_UNION_TYPE and
   has any readonly fields.  If any of the fields have types that
   contain readonly fields, return true as well.  */

int
readonly_fields_p (type)
     tree type;
{
  tree field;

  if (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
      && TREE_CODE (type) != QUAL_UNION_TYPE)
    return 0;

  for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL
        && (TREE_READONLY (field)
            || readonly_fields_p (TREE_TYPE (field))))
      return 1;

  return 0;
}

/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (t1, t2)
     tree t1, t2;
{
  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  /* We now know they are different types.  If one or both has readonly fields
     or if one is readonly and the other not, they may not conflict.
     Likewise if one is aggregate and the other is scalar.  */
  if ((t1 != 0 && readonly_fields_p (t1))
      || (t2 != 0 && readonly_fields_p (t2))
      || ((t1 != 0 && TYPE_READONLY (t1))
          != (t2 != 0 && TYPE_READONLY (t2)))
      || ((t1 != 0 && AGGREGATE_TYPE_P (t1))
          != (t2 != 0 && AGGREGATE_TYPE_P (t2))))
    return 0;

  /* Otherwise they conflict only if the alias sets conflict.  */
  return alias_sets_conflict_p (t1 ? get_alias_set (t1) : 0,
                                t2 ? get_alias_set (t2) : 0);
}

/* T is an expression with pointer type.  Find the DECL on which this
   expression is based.  (For example, in `a[i]' this would be `a'.)
gcc/calls.c  (40 changed lines)

@@ -2260,16 +2260,11 @@ expand_call (exp, target, ignore)
    structure_value_addr = XEXP (target, 0);
  else
    {
      rtx d;

      /* For variable-sized objects, we must be called with a target
         specified.  If we were to allocate space on the stack here,
         we would have no way of knowing when to free it.  */
      rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);

      if (struct_value_size < 0)
        abort ();

      d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
      mark_temp_addr_taken (d);
      structure_value_addr = XEXP (d, 0);
      target = 0;

@@ -3230,18 +3225,20 @@ expand_call (exp, target, ignore)
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (valreg) == PARALLEL)
        {
          int bytes = int_size_in_bytes (TREE_TYPE (exp));

          if (target == 0)
            {
              target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)),
                                          bytes, 0);
              MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
              /* This will only be assigned once, so it can be readonly.  */
              tree nt = build_qualified_type (TREE_TYPE (exp),
                                              (TYPE_QUALS (TREE_TYPE (exp))
                                               | TYPE_QUAL_CONST));

              target = assign_temp (nt, 0, 1, 1);
              preserve_temp_slots (target);
            }

          if (! rtx_equal_p (target, valreg))
            emit_group_store (target, valreg, bytes,
            emit_group_store (target, valreg,
                              int_size_in_bytes (TREE_TYPE (exp)),
                              TYPE_ALIGN (TREE_TYPE (exp)));

          /* We can not support sibling calls for this case.  */

@@ -3562,7 +3559,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
  if (value != 0 && GET_CODE (value) == MEM)
    mem_value = value;
  else
    mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
    mem_value = assign_temp (type_for_mode (outmode, 0), 0, 1, 1);
#endif

  /* This call returns a big structure.  */

@@ -3666,7 +3663,8 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
    {
      /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
         be viewed as just an efficiency improvement.  */
      rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
      rtx slot = assign_temp (type_for_mode (mode, 0), 0, 1, 1);

      call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                       gen_rtx_USE (VOIDmode, slot),
                                       call_fusage);

@@ -4339,15 +4337,15 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)

          if (save_mode == BLKmode)
            {
              arg->save_area = assign_stack_temp (BLKmode,
                                                  arg->size.constant, 0);
              MEM_SET_IN_STRUCT_P (arg->save_area,
                                   AGGREGATE_TYPE_P (TREE_TYPE
                                                     (arg->tree_value)));
              tree ot = TREE_TYPE (arg->tree_value);
              tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
                                                   | TYPE_QUAL_CONST));

              arg->save_area = assign_temp (nt, 0, 1, 1);
              preserve_temp_slots (arg->save_area);
              emit_block_move (validize_mem (arg->save_area), stack_area,
                               GEN_INT (arg->size.constant),
                               PARM_BOUNDARY);
                               expr_size (arg->tree_value),
                               MIN (PARM_BOUNDARY, TYPE_ALIGN (nt)));
            }
          else
            {
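All of the call-site changes in calls.c above, and the ones repeated in expr.c and stmt.c below, follow one pattern: instead of assign_stack_temp plus hand-set memory attributes such as MEM_SET_IN_STRUCT_P, the code builds a const-qualified variant of the tree type and lets assign_temp derive the attributes. The fragment below shows that idiom in isolation; it is a sketch meant to be read in GCC context, with `exp' standing for whatever expression the caller is expanding, and uses only calls that appear in the diff (build_qualified_type, TYPE_QUALS, TYPE_QUAL_CONST, assign_temp, preserve_temp_slots).

/* Sketch of the idiom applied throughout this commit.  The temporary is
   written exactly once, so it can be marked const; later reads are then
   known not to be clobbered.  */
tree type = TREE_TYPE (exp);
tree nt = build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST);

/* assign_temp now records the type on the slot, so RTX_UNCHANGING_P,
   MEM_VOLATILE_P, MEM_SET_IN_STRUCT_P and MEM_ALIAS_SET all come from
   NT instead of being set by hand at each call site.  */
rtx target = assign_temp (nt, 0, 1, 1);
preserve_temp_slots (target);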
gcc/expr.c  (241 changed lines)

@@ -181,7 +181,6 @@ static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p PARAMS ((tree));
static rtx var_rtx PARAMS ((tree));
static int readonly_fields_p PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));

@@ -2145,8 +2144,10 @@ copy_blkmode_from_reg (tgtblk, srcreg, type)

  if (tgtblk == 0)
    {
      tgtblk = assign_stack_temp (BLKmode, bytes, 0);
      MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

@@ -2822,17 +2823,17 @@ emit_move_insn_1 (x, y)
          enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
                                       ? MODE_FLOAT : MODE_INT);

          enum machine_mode reg_mode =
            mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
          enum machine_mode reg_mode
            = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

          if (reg_mode != BLKmode)
            {
              rtx mem = assign_stack_temp (reg_mode,
                                           GET_MODE_SIZE (mode), 0);

              rtx cmem = change_address (mem, mode, NULL_RTX);

              cfun->cannot_inline = N_("function using short complex types cannot be inline");
              cfun->cannot_inline
                = N_("function using short complex types cannot be inline");

              if (packed_dest_p)
                {

@@ -4921,9 +4922,11 @@ store_constructor (exp, target, align, cleared, size)

      if (REG_P (target))
        {
          targetx = assign_stack_temp (GET_MODE (target),
                                       GET_MODE_SIZE (GET_MODE (target)),
                                       0);
          targetx
            = assign_temp
              ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
                                      TYPE_QUAL_CONST)),
               0, 1, 1);
          emit_move_insn (targetx, target);
        }

@@ -5022,12 +5025,13 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
                                      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx object
        = assign_temp
          (build_qualified_type (type_for_mode (GET_MODE (target), 0),
                                 TYPE_QUAL_CONST),
           0, 1, 1);
      rtx blk_object = copy_rtx (object);

      MEM_SET_IN_STRUCT_P (object, 1);
      MEM_SET_IN_STRUCT_P (blk_object, 1);
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))

@@ -5506,7 +5510,12 @@ save_noncopied_parts (lhs, list)
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
        rtx target = assign_temp (part_type, 0, 1, 1);
        rtx target
          = assign_temp (build_qualified_type (part_type,
                                               (TYPE_QUALS (part_type)
                                                | TYPE_QUAL_CONST)),
                         0, 1, 1);

        if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
          target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
        parts = tree_cons (to_be_saved,

@@ -5559,10 +5568,7 @@ safe_from_p (x, exp, top_p)
{
  rtx exp_rtl = 0;
  int i, nops;
  static int save_expr_count;
  static int save_expr_size = 0;
  static tree *save_expr_rewritten;
  static tree save_expr_trees[256];
  static tree save_expr_list;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently

@@ -5577,31 +5583,14 @@ safe_from_p (x, exp, top_p)
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
                 != INTEGER_CST)
          && GET_MODE (x) == BLKmode))
          && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (GET_CODE (x) == MEM
          && (XEXP (x, 0) == virtual_outgoing_args_rtx
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  if (top_p && save_expr_size == 0)
    {
      int rtn;

      save_expr_count = 0;
      save_expr_size = ARRAY_SIZE (save_expr_trees);
      save_expr_rewritten = &save_expr_trees[0];

      rtn = safe_from_p (x, exp, 1);

      for (i = 0; i < save_expr_count; ++i)
        {
          if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
            abort ();
          TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
        }

      save_expr_size = 0;

      return rtn;
    }

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)

@@ -5611,13 +5600,31 @@ safe_from_p (x, exp, top_p)
        return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
          || (GET_CODE (XEXP (x, 0)) == PLUS
              && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;
  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */

  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
        TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':

@@ -5657,11 +5664,13 @@ safe_from_p (x, exp, top_p)
    {
    case ADDR_EXPR:
      return (staticp (TREE_OPERAND (exp, 0))
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
              || TREE_STATIC (exp));
              || TREE_STATIC (exp)
              || safe_from_p (x, TREE_OPERAND (exp, 0), 0));

    case INDIRECT_REF:
      if (GET_CODE (x) == MEM)
      if (GET_CODE (x) == MEM
          && alias_sets_conflict_p (MEM_ALIAS_SET (x),
                                    get_alias_set (exp)))
        return 0;
      break;

@@ -5695,37 +5704,20 @@ safe_from_p (x, exp, top_p)
      if (exp_rtl)
        break;

      /* This SAVE_EXPR might appear many times in the top-level
         safe_from_p() expression, and if it has a complex
         subexpression, examining it multiple times could result
         in a combinatorial explosion.  E.g. on an Alpha
         running at least 200MHz, a Fortran test case compiled with
         optimization took about 28 minutes to compile -- even though
         it was only a few lines long, and the complicated line causing
         so much time to be spent in the earlier version of safe_from_p()
         had only 293 or so unique nodes.
      /* If we've already scanned this, don't do it again.  Otherwise,
         show we've scanned it and record for clearing the flag if we're
         going on.  */
      if (TREE_PRIVATE (exp))
        return 1;

         So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
         where it is so we can turn it back in the top-level safe_from_p()
         when we're done.  */

      /* For now, don't bother re-sizing the array.  */
      if (save_expr_count >= save_expr_size)
        return 0;
      save_expr_rewritten[save_expr_count++] = exp;

      nops = TREE_CODE_LENGTH (SAVE_EXPR);
      for (i = 0; i < nops; i++)
      TREE_PRIVATE (exp) = 1;
      if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
        {
          tree operand = TREE_OPERAND (exp, i);
          if (operand == NULL_TREE)
            continue;
          TREE_SET_CODE (exp, ERROR_MARK);
          if (!safe_from_p (x, operand, 0))
            return 0;
          TREE_SET_CODE (exp, SAVE_EXPR);
          TREE_PRIVATE (exp) = 0;
          return 0;
        }
      TREE_SET_CODE (exp, ERROR_MARK);

      save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
      return 1;

    case BIND_EXPR:

@@ -5772,10 +5764,11 @@ safe_from_p (x, exp, top_p)
    }

  /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
     are memory and EXP is not readonly.  */
     are memory and they conflict.  */
  return ! (rtx_equal_p (x, exp_rtl)
            || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                && ! TREE_READONLY (exp)));
                && true_dependence (exp_rtl, GET_MODE (x), x,
                                    rtx_addr_varies_p)));
}

/* If we reach here, it is safe.  */

@@ -5870,26 +5863,6 @@ check_max_integer_computation_mode (exp)
}
#endif

/* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
   has any readonly fields.  If any of the fields have types that
   contain readonly fields, return true as well.  */

static int
readonly_fields_p (type)
     tree type;
{
  tree field;

  for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL
        && (TREE_READONLY (field)
            || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
                && readonly_fields_p (TREE_TYPE (field)))))
      return 1;

  return 0;
}

/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

@@ -6350,11 +6323,10 @@ expand_expr (exp, target, tmode, modifier)
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            {
              temp = assign_temp (type, 3, 0, 0);
              if (GET_CODE (temp) == MEM)
                RTX_UNCHANGING_P (temp) = 1;
            }
            temp = assign_temp (build_qualified_type (type,
                                                      (TYPE_QUALS (type)
                                                       | TYPE_QUAL_CONST)),
                                3, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)

@@ -6606,27 +6578,18 @@ expand_expr (exp, target, tmode, modifier)
                                     XEXP (constructor, 0));
          return constructor;
        }

      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL)
            {
              if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
                target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (TREE_READONLY (exp))
            {
              if (GET_CODE (target) == MEM)
                target = copy_rtx (target);

              RTX_UNCHANGING_P (target) = 1;
            }
            target
              = assign_temp (build_qualified_type (type,
                                                   (TYPE_QUALS (type)
                                                    | (TREE_READONLY (exp)
                                                       * TYPE_QUAL_CONST))),
                             TREE_ADDRESSABLE (exp), 1, 1);

          store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
                             int_size_in_bytes (TREE_TYPE (exp)));

@@ -6685,8 +6648,7 @@ expand_expr (exp, target, tmode, modifier)
        /* If we are writing to this object and its type is a record with
           readonly fields, we must mark it as readonly so it will
           conflict with readonly references to those fields.  */
        if (modifier == EXPAND_MEMORY_USE_WO
            && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
        if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
          RTX_UNCHANGING_P (temp) = 1;

        return temp;

@@ -6902,7 +6864,10 @@ expand_expr (exp, target, tmode, modifier)
        if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
          {
            rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
            tree nt = build_qualified_type (TREE_TYPE (tem),
                                            (TYPE_QUALS (TREE_TYPE (tem))
                                             | TYPE_QUAL_CONST));
            rtx memloc = assign_temp (nt, 1, 1, 1);

            mark_temp_addr_taken (memloc);
            emit_move_insn (memloc, op0);

@@ -7077,13 +7042,13 @@ expand_expr (exp, target, tmode, modifier)

            if (mode == BLKmode)
              {
                rtx new = assign_stack_temp (ext_mode,
                                             bitsize / BITS_PER_UNIT, 0);
                tree nt = build_qualified_type (type_for_size (ext_mode, 0),
                                                TYPE_QUAL_CONST);
                rtx new = assign_temp (nt, 0, 1, 1);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                MEM_SET_IN_STRUCT_P (op0, 1);
              }

            return op0;

@@ -7303,12 +7268,7 @@ expand_expr (exp, target, tmode, modifier)
                             modifier);

        if (target == 0)
          {
            if (mode != BLKmode)
              target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
            else
              target = assign_temp (type, 0, 1, 1);
          }
          target = assign_temp (type, 0, 1, 1);

        if (GET_CODE (target) == MEM)
          /* Store data into beginning of memory target.  */

@@ -8514,7 +8474,10 @@ expand_expr (exp, target, tmode, modifier)
            /* If this object is in a register, it must be not
               be BLKmode.  */
            tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
            rtx memloc = assign_temp (inner_type, 1, 1, 1);
            tree nt = build_qualified_type (inner_type,
                                            (TYPE_QUALS (inner_type)
                                             | TYPE_QUAL_CONST));
            rtx memloc = assign_temp (nt, 1, 1, 1);

            mark_temp_addr_taken (memloc);
            if (GET_CODE (op0) == PARALLEL)

@@ -8914,7 +8877,10 @@ expand_expr_unaligned (exp, palign)
        if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
          {
            rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
            tree nt = build_qualified_type (TREE_TYPE (tem),
                                            (TYPE_QUALS (TREE_TYPE (tem))
                                             | TYPE_QUAL_CONST));
            rtx memloc = assign_temp (nt, 1, 1, 1);

            mark_temp_addr_taken (memloc);
            emit_move_insn (memloc, op0);

@@ -8996,8 +8962,9 @@ expand_expr_unaligned (exp, palign)
          }
        else
          {
            rtx new = assign_stack_temp (ext_mode,
                                         bitsize / BITS_PER_UNIT, 0);
            tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
                                            TYPE_QUAL_CONST);
            rtx new = assign_temp (nt, 0, 1, 1);

            op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
                                     unsignedp, NULL_RTX, ext_mode,
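The safe_from_p rework shown above drops the old scheme of temporarily rewriting SAVE_EXPRs to ERROR_MARK and instead flags each one with TREE_PRIVATE, collecting the flagged nodes on save_expr_list so the top-level call can clear them afterwards. Below is a simplified standalone model of that marking discipline; the node structure and walk functions are illustrative stand-ins, not GCC trees.

#include <stdio.h>

/* A tiny expression DAG: shared nodes model SAVE_EXPRs that can be
   reached many times from the root.  */
struct node {
  int shared;            /* models a SAVE_EXPR */
  int visited;           /* models TREE_PRIVATE */
  struct node *kids[2];
};

static struct node *marked[64];
static int n_marked;

/* Recursive walk; without the `visited' flag a shared subtree would be
   re-examined once per path that reaches it.  */
static int walk (struct node *n)
{
  if (n == 0)
    return 1;
  if (n->shared)
    {
      if (n->visited)        /* already scanned: trust the earlier answer */
        return 1;
      n->visited = 1;        /* mark, and remember to clear it later */
      marked[n_marked++] = n;
    }
  return walk (n->kids[0]) && walk (n->kids[1]);
}

/* Top-level entry point: run the walk, then clear every flag we set,
   mirroring how the top-level safe_from_p call resets TREE_PRIVATE.  */
static int walk_top (struct node *root)
{
  int i, ok;
  n_marked = 0;
  ok = walk (root);
  for (i = 0; i < n_marked; i++)
    marked[i]->visited = 0;
  return ok;
}

int main (void)
{
  struct node leaf = { 1, 0, { 0, 0 } };       /* shared subtree */
  struct node a = { 0, 0, { &leaf, &leaf } };  /* reaches it twice */
  struct node root = { 0, 0, { &a, &leaf } };  /* and again */
  printf ("%d\n", walk_top (&root));           /* prints 1 */
  return 0;
}

The design point is the same as in the diff: the flag makes the walk linear in the number of distinct nodes rather than in the number of paths, and deferring the cleanup to the top-level call keeps the recursive cases simple.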
gcc/function.c

@@ -193,17 +193,11 @@ struct temp_slot
  int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The alias set for the slot.  If the alias set is zero, we don't
     know anything about the alias set of the slot.  We must only
     reuse a slot if it is assigned an object of the same alias set.
     Otherwise, the rest of the compiler may assume that the new use
     of the slot cannot alias the old use of the slot, which is
     false.  If the slot has alias set zero, then we can't reuse the
     slot at all, since we have no idea what alias set may have been
     imposed on the memory.  For example, if the stack slot is the
     call frame for an inline functioned, we have no idea what alias
     sets will be assigned to various pieces of the call frame.  */
  HOST_WIDE_INT alias_set;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Non-zero if this temporary is currently in use.  */

@@ -658,7 +652,6 @@ assign_stack_temp_for_type (mode, size, keep, type)
     tree type;
{
  int align;
  HOST_WIDE_INT alias_set;
  struct temp_slot *p, *best_p = 0;

  /* If SIZE is -1 it means that somebody tried to allocate a temporary

@@ -666,14 +659,6 @@ assign_stack_temp_for_type (mode, size, keep, type)
  if (size == -1)
    abort ();

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    alias_set = get_alias_set (type);
  else
    alias_set = 0;

  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else

@@ -691,8 +676,7 @@ assign_stack_temp_for_type (mode, size, keep, type)
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
        && ! p->in_use
        && (! flag_strict_aliasing
            || (alias_set && p->alias_set == alias_set))
        && objects_must_conflict_p (p->type, type)
        && (best_p == 0 || best_p->size > p->size
            || (best_p->size == p->size && best_p->align > p->align)))
      {

@@ -728,7 +712,7 @@ assign_stack_temp_for_type (mode, size, keep, type)
          p->align = best_p->align;
          p->address = 0;
          p->rtl_expr = 0;
          p->alias_set = best_p->alias_set;
          p->type = best_p->type;
          p->next = temp_slots;
          temp_slots = p;

@@ -766,7 +750,6 @@ assign_stack_temp_for_type (mode, size, keep, type)
                               align);

      p->align = align;
      p->alias_set = alias_set;

      /* The following slot size computation is necessary because we don't
         know the actual size of the temporary slot until assign_stack_local

@@ -797,6 +780,7 @@ assign_stack_temp_for_type (mode, size, keep, type)
  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  if (keep == 2)
    {

@@ -819,10 +803,23 @@ assign_stack_temp_for_type (mode, size, keep, type)
  RTX_UNCHANGING_P (p->slot) = 0;
  MEM_IN_STRUCT_P (p->slot) = 0;
  MEM_SCALAR_P (p->slot) = 0;
  MEM_ALIAS_SET (p->slot) = alias_set;
  MEM_VOLATILE_P (p->slot) = 0;

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  if (type)
    MEM_ALIAS_SET (p->slot) = get_alias_set (type);
  else
    MEM_ALIAS_SET (p->slot) = 0;

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
    {
      RTX_UNCHANGING_P (p->slot) = TYPE_READONLY (type);
      MEM_VOLATILE_P (p->slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (p->slot, AGGREGATE_TYPE_P (type));
    }

  return p->slot;
}

@@ -1509,6 +1506,7 @@ put_reg_into_stack (function, reg, type, promoted_mode, decl_mode, volatile_p,
/* Make sure that all refs to the variable, previously made
   when it was a register, are fixed up to be valid again.
   See function above for meaning of arguments.  */

static void
schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
     struct function *function;

@@ -7453,6 +7451,7 @@ mark_temp_slot (t)
      ggc_mark_rtx (t->slot);
      ggc_mark_rtx (t->address);
      ggc_mark_tree (t->rtl_expr);
      ggc_mark_tree (t->type);

      t = t->next;
    }
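With struct temp_slot carrying a TYPE instead of an ALIAS_SET, slot reuse in assign_stack_temp_for_type now asks whether the old and new contents are guaranteed to conflict, rather than whether their alias sets are equal. Below is a simplified standalone model of that reuse scan; struct slot and must_conflict_p are illustrative stand-ins (the real predicate is objects_must_conflict_p added to alias.c above).

#include <stddef.h>

/* Illustrative free-slot record; the real struct temp_slot also tracks
   mode, alignment, addresses, etc.  */
struct slot {
  long size;
  int in_use;
  void *type;            /* stand-in for the `tree type' field */
  struct slot *next;
};

/* Conservative stand-in: treat only identical type handles as "must
   conflict".  GCC's objects_must_conflict_p is smarter, but the point
   is the same: reuse is only safe when every object of the new type is
   guaranteed to conflict with every object of the old one, so the rest
   of the compiler can never assume the two uses are independent.  */
static int must_conflict_p (void *old_type, void *new_type)
{
  return old_type == new_type;
}

struct slot *find_reusable (struct slot *slots, long size, void *type)
{
  struct slot *p, *best = NULL;

  for (p = slots; p; p = p->next)
    if (! p->in_use
        && p->size >= size
        && must_conflict_p (p->type, type)   /* replaces the old
                                                alias-set equality test */
        && (best == NULL || best->size > p->size))
      best = p;

  return best;
}

If the two uses merely might not alias, later alias analysis could reorder accesses to the shared slot, which is exactly the bug the old same-alias-set test was trying to avoid and the new predicate makes explicit.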
gcc/integrate.c

@@ -716,14 +716,9 @@ expand_inline_function (fndecl, parms, target, ignore, type,
      if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
          && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
        {
          rtx stack_slot
            = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
                                 int_size_in_bytes (TREE_TYPE (arg)), 1);
          MEM_SET_IN_STRUCT_P (stack_slot,
                               AGGREGATE_TYPE_P (TREE_TYPE (arg)));
          rtx stack_slot = assign_temp (TREE_TYPE (arg), 1, 1, 1);

          store_expr (arg, stack_slot, 0);

          arg_vals[i] = XEXP (stack_slot, 0);
          invisiref = 1;
        }
gcc/stmt.c  (22 changed lines)

@@ -1757,7 +1757,10 @@ expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
          || GET_CODE (op) == CONCAT)
        {
          tree type = TREE_TYPE (TREE_VALUE (tail));
          rtx memloc = assign_temp (type, 1, 1, 1);
          tree qual_type = build_qualified_type (type,
                                                 (TYPE_QUALS (type)
                                                  | TYPE_QUAL_CONST));
          rtx memloc = assign_temp (qual_type, 1, 1, 1);

          emit_move_insn (memloc, op);
          op = memloc;

@@ -3100,8 +3103,10 @@ expand_return (retval)
    {
      /* Calculate the return value into a temporary (usually a pseudo
         reg).  */
      val = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)),
                         0, 0, 1);
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
      val = force_not_mem (val);
      emit_queue ();

@@ -3822,12 +3827,13 @@ expand_decl (decl)

  if (type == error_mark_node)
    DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);

  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      if (DECL_INITIAL (decl) == 0)
        /* Error message was already done; now avoid a crash.  */
        DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
        DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
      else
        /* An initializer is going to decide the size of this array.
           Until we know the size, represent its address with a reg.  */

@@ -4735,18 +4741,16 @@ add_case_node (low, high, label, duplicate)
        }
    }

  /* Add this label to the chain, and succeed.
     Copy LOW, HIGH so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */
  /* Add this label to the chain, and succeed.  */

  r = (struct case_node *) xmalloc (sizeof (struct case_node));
  r->low = copy_node (low);
  r->low = low;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (low, high))
    r->high = r->low;
  else
    r->high = copy_node (high);
    r->high = high;

  r->code_label = label;
  expand_label (label);
gcc/tree.h

@@ -2623,6 +2623,10 @@ extern void rest_of_type_compilation PARAMS ((tree, int));
/* In alias.c */
extern void record_component_aliases PARAMS ((tree));
extern HOST_WIDE_INT get_alias_set PARAMS ((tree));
extern int alias_sets_conflict_p PARAMS ((HOST_WIDE_INT,
                                          HOST_WIDE_INT));
extern int readonly_fields_p PARAMS ((tree));
extern int objects_must_conflict_p PARAMS ((tree, tree));

/* In c-common.c */
extern HOST_WIDE_INT lang_get_alias_set PARAMS ((tree));