re PR rtl-optimization/37889 (SEGV, conditional execution proactively executed the false arm.)

2009-02-04  Paolo Bonzini  <bonzini@gnu.org>
	    Hans-Peter Nilsson  <hp@axis.com>

	PR rtl-optimization/37889
	* rtlanal.c (rtx_addr_can_trap_p_1): Add offset and size arguments.
	Move offset handling from PLUS to before the switch.  Use new
	arguments when considering SYMBOL_REFs too.
	(rtx_addr_can_trap_p): Pass dummy offset and size.
	(enum may_trap_p_flags): Remove.
	(may_trap_p_1): Pass size from MEM_SIZE.

	PR rtl-optimization/38921
	* loop-invariant.c (find_invariant_insn): Use may_trap_or_fault_p.
	* rtl.h (may_trap_after_code_motion_p): Delete prototype.
	* rtlanal.c (may_trap_after_code_motion_p): Delete.
	(may_trap_p, may_trap_or_fault_p): Pass 0/1 as flags.


Co-Authored-By: Hans-Peter Nilsson <hp@axis.com>

From-SVN: r143939
Author:    Paolo Bonzini <bonzini@gnu.org>
Date:      2009-02-04 20:54:36 +0000
Committer: Paolo Bonzini
Commit:    48e8382e95
Parent:    69349ea551

4 changed files with 109 additions and 65 deletions
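
PR 37889 is a case where if-conversion speculated a guarded memory access
whose address is a SYMBOL_REF plus a constant offset lying outside the
referenced object.  A minimal C sketch of the affected pattern (hypothetical,
not the PR's actual testcase):

    extern char c;              /* a 1-byte object */

    int
    f (int p)
    {
      int r = 0;
      if (p)
        r = *(int *) &c;        /* 4-byte load from a 1-byte object */
      return r;
    }

Before this patch, rtx_addr_can_trap_p_1 looked only at the bare SYMBOL_REF
(weak or not) and checked constant offsets only for alignment, never against
the object's size, so the guarded load looked safe to execute unconditionally.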

--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog

@@ -1,3 +1,20 @@
+2009-02-04  Paolo Bonzini  <bonzini@gnu.org>
+	    Hans-Peter Nilsson  <hp@axis.com>
+
+	PR rtl-optimization/37889
+	* rtlanal.c (rtx_addr_can_trap_p_1): Add offset and size arguments.
+	Move offset handling from PLUS to before the switch.  Use new
+	arguments when considering SYMBOL_REFs too.
+	(rtx_addr_can_trap_p): Pass dummy offset and size.
+	(enum may_trap_p_flags): Remove.
+	(may_trap_p_1): Pass size from MEM_SIZE.
+
+	PR rtl-optimization/38921
+	* loop-invariant.c (find_invariant_insn): Use may_trap_or_fault_p.
+	* rtl.h (may_trap_after_code_motion_p): Delete prototype.
+	* rtlanal.c (may_trap_after_code_motion_p): Delete.
+	(may_trap_p, may_trap_or_fault_p): Pass 0/1 as flags.
+
 2009-02-04  H.J. Lu  <hongjiu.lu@intel.com>
 
 	AVX Programming Reference (January, 2009)

--- a/gcc/loop-invariant.c
+++ b/gcc/loop-invariant.c

@@ -824,7 +824,7 @@ find_invariant_insn (rtx insn, bool always_reached, bool always_executed)
     return;
 
   /* We cannot make trapping insn executed, unless it was executed before.  */
-  if (may_trap_after_code_motion_p (PATTERN (insn)) && !always_reached)
+  if (may_trap_or_fault_p (PATTERN (insn)) && !always_reached)
     return;
 
   depends_on = BITMAP_ALLOC (NULL);
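
PR 38921 is the loop-invariant-motion instance of the same hazard: an
invariant but possibly-trapping insn must not be hoisted unless it was always
reached in the original loop.  A hypothetical C shape of the problem:

    int
    sum (int *p, int n)
    {
      int s = 0;
      int i;

      for (i = 0; i < n; i++)
        s += *p;        /* invariant load, but hoisting it above the loop
                           test executes it even when n <= 0 and P may be
                           unusable */
      return s;
    }

With may_trap_after_code_motion_p gone, find_invariant_insn now asks the
stronger may_trap_or_fault_p, which (see the rtlanal.c changes below) never
trusts MEM_NOTRAP_P once the insn may have been moved.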

--- a/gcc/rtl.h
+++ b/gcc/rtl.h

@@ -1767,7 +1767,6 @@ extern int volatile_refs_p (const_rtx);
 extern int volatile_insn_p (const_rtx);
 extern int may_trap_p_1 (const_rtx, unsigned);
 extern int may_trap_p (const_rtx);
-extern int may_trap_after_code_motion_p (const_rtx);
 extern int may_trap_or_fault_p (const_rtx);
 extern int inequality_comparisons_p (const_rtx);
 extern rtx replace_rtx (rtx, rtx, rtx);

--- a/gcc/rtlanal.c
+++ b/gcc/rtlanal.c

@@ -263,14 +263,68 @@ rtx_varies_p (const_rtx x, bool for_alias)
    alignment machines.  */
 
 static int
-rtx_addr_can_trap_p_1 (const_rtx x, enum machine_mode mode, bool unaligned_mems)
+rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
+		       enum machine_mode mode, bool unaligned_mems)
 {
   enum rtx_code code = GET_CODE (x);
 
+  if (STRICT_ALIGNMENT
+      && unaligned_mems
+      && GET_MODE_SIZE (mode) != 0)
+    {
+      HOST_WIDE_INT actual_offset = offset;
+#ifdef SPARC_STACK_BOUNDARY_HACK
+      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
+	 the real alignment of %sp.  However, when it does this, the
+	 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
+      if (SPARC_STACK_BOUNDARY_HACK
+	  && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
+	actual_offset -= STACK_POINTER_OFFSET;
+#endif
+      return actual_offset % GET_MODE_SIZE (mode) != 0;
+    }
+
   switch (code)
     {
     case SYMBOL_REF:
-      return SYMBOL_REF_WEAK (x);
+      if (SYMBOL_REF_WEAK (x))
+	return 1;
+
+      if (!CONSTANT_POOL_ADDRESS_P (x))
+	{
+	  tree decl;
+	  HOST_WIDE_INT decl_size;
+
+	  if (offset < 0)
+	    return 1;
+	  if (size == 0)
+	    size = GET_MODE_SIZE (mode);
+	  if (size == 0)
+	    return offset != 0;
+
+	  /* If the size of the access or of the symbol is unknown,
+	     assume the worst.  */
+	  decl = SYMBOL_REF_DECL (x);
+
+	  /* Else check that the access is in bounds.  TODO: restructure
+	     expr_size/lhd_expr_size/int_expr_size and just use the latter.  */
+	  if (!decl)
+	    decl_size = -1;
+	  else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
+	    decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0)
+			 ? tree_low_cst (DECL_SIZE_UNIT (decl), 0)
+			 : -1);
+	  else if (TREE_CODE (decl) == STRING_CST)
+	    decl_size = TREE_STRING_LENGTH (decl);
+	  else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
+	    decl_size = int_size_in_bytes (TREE_TYPE (decl));
+	  else
+	    decl_size = -1;
+
+	  return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
+	}
+
+      return 0;
 
     case LABEL_REF:
       return 0;
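
The new SYMBOL_REF handling reduces to a small decision procedure.  Restated
as standalone C (simplified types and a hypothetical helper name, not the GCC
code itself):

    /* Return 1 if an access of SIZE bytes at OFFSET into a symbol whose
       object is DECL_SIZE bytes (or <= 0 if unknown) may trap.  */
    static int
    symbol_access_may_trap (long offset, long size, long decl_size)
    {
      if (offset < 0)
        return 1;                        /* starts before the object */
      if (decl_size <= 0)
        return offset != 0;              /* unknown size: only offset 0 is safe */
      return offset + size > decl_size;  /* must lie wholly within the object */
    }

The fallbacks mirror the patch: a zero access size first falls back to the
mode size, and an unknown declaration size means anything past the first byte
is assumed to trap.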
@@ -289,54 +343,37 @@ rtx_addr_can_trap_p_1 (const_rtx x, enum machine_mode mode, bool unaligned_mems)
       return 1;
 
     case CONST:
-      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
+      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
+				    mode, unaligned_mems);
 
     case PLUS:
       /* An address is assumed not to trap if:
-	 - it is an address that can't trap plus a constant integer,
-	   with the proper remainder modulo the mode size if we are
-	   considering unaligned memory references.  */
-      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
-	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
-	{
-	  HOST_WIDE_INT offset;
-
-	  if (!STRICT_ALIGNMENT
-	      || !unaligned_mems
-	      || GET_MODE_SIZE (mode) == 0)
-	    return 0;
-
-	  offset = INTVAL (XEXP (x, 1));
-
-#ifdef SPARC_STACK_BOUNDARY_HACK
-	  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
-	     the real alignment of %sp.  However, when it does this, the
-	     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
-	  if (SPARC_STACK_BOUNDARY_HACK
-	      && (XEXP (x, 0) == stack_pointer_rtx
-		  || XEXP (x, 0) == hard_frame_pointer_rtx))
-	    offset -= STACK_POINTER_OFFSET;
-#endif
-
-	  return offset % GET_MODE_SIZE (mode) != 0;
-	}
-
-      /* - or it is the pic register plus a constant.  */
+	 - it is the pic register plus a constant.  */
       if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
 	return 0;
 
+      /* - or it is an address that can't trap plus a constant integer,
+	   with the proper remainder modulo the mode size if we are
+	   considering unaligned memory references.  */
+      if (GET_CODE (XEXP (x, 1)) == CONST_INT
+	  && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
+				     size, mode, unaligned_mems))
+	return 0;
+
       return 1;
 
     case LO_SUM:
     case PRE_MODIFY:
-      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);
+      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
+				    mode, unaligned_mems);
 
     case PRE_DEC:
     case PRE_INC:
     case POST_DEC:
     case POST_INC:
     case POST_MODIFY:
-      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
+      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
+				    mode, unaligned_mems);
 
     default:
       break;
@@ -351,7 +388,7 @@ rtx_addr_can_trap_p_1 (const_rtx x, enum machine_mode mode, bool unaligned_mems)
 int
 rtx_addr_can_trap_p (const_rtx x)
 {
-  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
+  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
 }
 
 /* Return true if X is an address that is known to not be zero.  */
@@ -2170,17 +2207,10 @@ side_effects_p (const_rtx x)
   return 0;
 }
 
-enum may_trap_p_flags
-{
-  MTP_UNALIGNED_MEMS = 1,
-  MTP_AFTER_MOVE = 2
-};
-
 /* Return nonzero if evaluating rtx X might cause a trap.
-   (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
-   unaligned memory accesses on strict alignment machines.  If
-   (FLAGS & AFTER_MOVE) is true, returns nonzero even in case the expression
-   cannot trap at its current location, but it might become trapping if moved
-   elsewhere.  */
+   FLAGS controls how to consider MEMs.  A nonzero means the context
+   of the access may have changed from the original, such that the
+   address may have become invalid.  */
 
 int
 may_trap_p_1 (const_rtx x, unsigned flags)
@@ -2188,7 +2218,11 @@ may_trap_p_1 (const_rtx x, unsigned flags)
   int i;
   enum rtx_code code;
   const char *fmt;
-  bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;
+
+  /* We make no distinction currently, but this function is part of
+     the internal target-hooks ABI so we keep the parameter as
+     "unsigned flags".  */
+  bool code_changed = flags != 0;
 
   if (x == 0)
     return 0;
@@ -2223,14 +2257,17 @@ may_trap_p_1 (const_rtx x, unsigned flags)
       /* Memory ref can trap unless it's a static var or a stack slot.  */
     case MEM:
       if (/* MEM_NOTRAP_P only relates to the actual position of the memory
-	     reference; moving it out of condition might cause its address
-	     become invalid.  */
-	  !(flags & MTP_AFTER_MOVE)
-	  && MEM_NOTRAP_P (x)
-	  && (!STRICT_ALIGNMENT || !unaligned_mems))
-	return 0;
-      return
-	rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);
+	     reference; moving it out of context such as when moving code
+	     when optimizing, might cause its address to become invalid.  */
+	  code_changed
+	  || !MEM_NOTRAP_P (x))
+	{
+	  HOST_WIDE_INT size = MEM_SIZE (x) ? INTVAL (MEM_SIZE (x)) : 0;
+	  return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
+					GET_MODE (x), code_changed);
+	}
+      return 0;
 
     /* Division by a non-constant might trap.  */
     case DIV:
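
The MEM case inverts the old early exit: instead of trusting MEM_NOTRAP_P
unless MTP_AFTER_MOVE was set, it re-validates the address whenever the
context changed or the MEM was never marked no-trap, and it forwards the
access size from MEM_SIZE so the SYMBOL_REF bounds check above can fire.  A
standalone model of the new test (plain C, hypothetical name):

    /* Re-check a MEM's address unless it is known not to trap
       *and* the surrounding code is unchanged.  */
    static int
    mem_needs_recheck (int code_changed, int mem_notrap_p)
    {
      return code_changed || !mem_notrap_p;
    }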
@@ -2328,15 +2365,6 @@
   return may_trap_p_1 (x, 0);
 }
 
-/* Return nonzero if evaluating rtx X might cause a trap, when the expression
-   is moved from its current location by some optimization.  */
-
-int
-may_trap_after_code_motion_p (const_rtx x)
-{
-  return may_trap_p_1 (x, MTP_AFTER_MOVE);
-}
-
 /* Same as above, but additionally return nonzero if evaluating rtx X might
    cause a fault.  We define a fault for the purpose of this function as an
    erroneous execution condition that cannot be encountered during the normal
@@ -2380,7 +2408,7 @@ may_trap_after_code_motion_p (const_rtx x)
 int
 may_trap_or_fault_p (const_rtx x)
 {
-  return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
+  return may_trap_p_1 (x, 1);
 }
 
 /* Return nonzero if X contains a comparison that is not either EQ or NE,
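
After this change the two public predicates differ only in the flags value
they pass down; restating the call contract (a summary of the code above, not
a new API):

    may_trap_p (x)          /* may_trap_p_1 (x, 0): X stays in place, so
                               MEM_NOTRAP_P and alignment may be trusted.  */
    may_trap_or_fault_p (x) /* may_trap_p_1 (x, 1): X may have been moved,
                               so its address is always re-validated.  */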