stor-layout.c (bit_from_pos): Distribute conversion to bitsizetype into a PLUS_EXPR byte offset.

* stor-layout.c (bit_from_pos): Distribute conversion to bitsizetype
	into a PLUS_EXPR byte offset.

	* tree-ssa-pre.c (can_value_number_call): Delete.
	(compute_avail): Skip all statements with side effects.
	<GIMPLE_CALL>: Skip calls to internal functions.

From-SVN: r187450
This commit is contained in:
Eric Botcazou 2012-05-14 08:46:33 +00:00 committed by Eric Botcazou
parent 5341ab8095
commit 9cbbba287d
3 changed files with 31 additions and 32 deletions

View File

@ -1,3 +1,12 @@
2012-05-14 Eric Botcazou <ebotcazou@adacore.com>
* stor-layout.c (bit_from_pos): Distribute conversion to bitsizetype
into a PLUS_EXPR byte offset.
* tree-ssa-pre.c (can_value_number_call): Delete.
(compute_avail): Skip all statements with side effects.
<GIMPLE_CALL>: Skip calls to internal functions.
2012-05-13 Steven Bosscher <steven@gcc.gnu.org>
* config/pa/pa.md: Use define_c_enum for "unspec" and "unspecv".

View File

@ -786,26 +786,30 @@ start_record_layout (tree t)
}
/* Return the combined bit position for the byte offset OFFSET and the
bit position BITPOS. */
bit position BITPOS.
/* Pre-patch version (removed by this commit): converts the whole byte
   OFFSET to bitsizetype in a single fold_convert before multiplying by
   BITS_PER_UNIT (bitsize_unit_node) and adding BITPOS.  */
tree
bit_from_pos (tree offset, tree bitpos)
{
/* bitpos + (bitsizetype) offset * BITS_PER_UNIT  */
return size_binop (PLUS_EXPR, bitpos,
size_binop (MULT_EXPR,
fold_convert (bitsizetype, offset),
bitsize_unit_node));
}
/* Return the combined truncated byte position for the byte offset OFFSET and
the bit position BITPOS.
These functions operate on byte and bit positions as present in FIELD_DECLs
These functions operate on byte and bit positions present in FIELD_DECLs
and assume that these expressions result in no (intermediate) overflow.
This assumption is necessary to fold the expressions as much as possible,
so as to avoid creating artificially variable-sized types in languages
supporting variable-sized types like Ada. */
/* Return the combined bit position for byte offset OFFSET and bit
   position BITPOS, i.e. BITPOS + OFFSET * BITS_PER_UNIT in bitsizetype.  */
tree
bit_from_pos (tree offset, tree bitpos)
{
/* Distribute the bitsizetype conversion into the two operands of a
   PLUS_EXPR byte offset, rather than converting the sum as a whole
   (per the ChangeLog entry of this commit: this helps fold the
   expressions as much as possible and avoid creating artificially
   variable-sized types).  */
if (TREE_CODE (offset) == PLUS_EXPR)
offset = size_binop (PLUS_EXPR,
fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
else
/* Not a sum: a single conversion suffices.  */
offset = fold_convert (bitsizetype, offset);
/* bitpos + offset * BITS_PER_UNIT  */
return size_binop (PLUS_EXPR, bitpos,
size_binop (MULT_EXPR, offset, bitsize_unit_node));
}
/* Return the combined truncated byte position for the byte offset OFFSET and
the bit position BITPOS. */
tree
byte_from_pos (tree offset, tree bitpos)
{

View File

@ -2586,19 +2586,6 @@ compute_antic (void)
sbitmap_free (changed_blocks);
}
/* Return true if we can value number the call in STMT. This is true
if we have a pure or constant call to a real function. */
/* NOTE: this predicate is deleted by this commit; compute_avail now
   skips all statements with side effects and checks
   gimple_call_internal_p inline at the GIMPLE_CALL case.  */
static bool
can_value_number_call (gimple stmt)
{
/* Calls to internal functions are not value-numberable.  */
if (gimple_call_internal_p (stmt))
return false;
/* Pure or const calls depend only on their arguments (and memory for
   pure), so they can be value numbered.  */
if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
return true;
return false;
}
/* Return true if OP is a tree which we can perform PRE on.
This may not match the operations we can value number, but in
a perfect world would. */
@ -3975,8 +3962,7 @@ compute_avail (void)
or control flow.
If this isn't a call or it is the last stmt in the
basic-block then the CFG represents things correctly. */
if (is_gimple_call (stmt)
&& !stmt_ends_bb_p (stmt))
if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
{
/* Non-looping const functions always return normally.
Otherwise the call might not return or have side-effects
@ -3998,8 +3984,7 @@ compute_avail (void)
bitmap_value_insert_into_set (AVAIL_OUT (block), e);
}
if (gimple_has_volatile_ops (stmt)
|| stmt_could_throw_p (stmt))
if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
continue;
switch (gimple_code (stmt))
@ -4017,7 +4002,8 @@ compute_avail (void)
pre_expr result = NULL;
VEC(vn_reference_op_s, heap) *ops = NULL;
if (!can_value_number_call (stmt))
/* We can value number only calls to real functions. */
if (gimple_call_internal_p (stmt))
continue;
copy_reference_ops_from_call (stmt, &ops);