Fix volatile issues in optimize_bit_field_compare.
2013-10-21 Bernd Edlinger <bernd.edlinger@hotmail.de> Fix volatile issues in optimize_bit_field_compare. * fold-const.c (optimize_bit_field_compare): Bail out if lvolatilep or rvolatilep. From-SVN: r203899
This commit is contained in:
parent
5dfd32cdda
commit
0cad6830a4
|
@ -1,3 +1,9 @@
|
||||||
|
2013-10-21 Bernd Edlinger <bernd.edlinger@hotmail.de>
|
||||||
|
|
||||||
|
Fix volatile issues in optimize_bit_field_compare.
|
||||||
|
* fold-const.c (optimize_bit_field_compare): Bail out if
|
||||||
|
lvolatilep or rvolatilep.
|
||||||
|
|
||||||
2013-10-21 Bernd Edlinger <bernd.edlinger@hotmail.de>
|
2013-10-21 Bernd Edlinger <bernd.edlinger@hotmail.de>
|
||||||
|
|
||||||
Fix DECL_BIT_FIELD dependency on flag_strict_volatile_bitfields
|
Fix DECL_BIT_FIELD dependency on flag_strict_volatile_bitfields
|
||||||
|
|
|
@ -3473,11 +3473,6 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
|
||||||
tree mask;
|
tree mask;
|
||||||
tree offset;
|
tree offset;
|
||||||
|
|
||||||
/* In the strict volatile bitfields case, doing code changes here may prevent
|
|
||||||
other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
|
|
||||||
if (flag_strict_volatile_bitfields > 0)
|
|
||||||
return 0;
|
|
||||||
|
|
||||||
/* Get all the information about the extractions being done. If the bit size
|
/* Get all the information about the extractions being done. If the bit size
|
||||||
is the same as the size of the underlying object, we aren't doing an
|
is the same as the size of the underlying object, we aren't doing an
|
||||||
extraction at all and so can do nothing. We also don't want to
|
extraction at all and so can do nothing. We also don't want to
|
||||||
|
@ -3486,7 +3481,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
|
||||||
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
|
linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
|
||||||
&lunsignedp, &lvolatilep, false);
|
&lunsignedp, &lvolatilep, false);
|
||||||
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
|
if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
|
||||||
|| offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
|
|| offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
|
||||||
return 0;
|
return 0;
|
||||||
|
|
||||||
if (!const_p)
|
if (!const_p)
|
||||||
|
@ -3498,22 +3493,17 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
|
||||||
|
|
||||||
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|
if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
|
||||||
|| lunsignedp != runsignedp || offset != 0
|
|| lunsignedp != runsignedp || offset != 0
|
||||||
|| TREE_CODE (rinner) == PLACEHOLDER_EXPR)
|
|| TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
|
||||||
return 0;
|
return 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
/* See if we can find a mode to refer to this field. We should be able to,
|
/* See if we can find a mode to refer to this field. We should be able to,
|
||||||
but fail if we can't. */
|
but fail if we can't. */
|
||||||
if (lvolatilep
|
nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
|
||||||
&& GET_MODE_BITSIZE (lmode) > 0
|
const_p ? TYPE_ALIGN (TREE_TYPE (linner))
|
||||||
&& flag_strict_volatile_bitfields > 0)
|
: MIN (TYPE_ALIGN (TREE_TYPE (linner)),
|
||||||
nmode = lmode;
|
TYPE_ALIGN (TREE_TYPE (rinner))),
|
||||||
else
|
word_mode, false);
|
||||||
nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
|
|
||||||
const_p ? TYPE_ALIGN (TREE_TYPE (linner))
|
|
||||||
: MIN (TYPE_ALIGN (TREE_TYPE (linner)),
|
|
||||||
TYPE_ALIGN (TREE_TYPE (rinner))),
|
|
||||||
word_mode, lvolatilep || rvolatilep);
|
|
||||||
if (nmode == VOIDmode)
|
if (nmode == VOIDmode)
|
||||||
return 0;
|
return 0;
|
||||||
|
|
||||||
|
@ -3602,11 +3592,6 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
|
||||||
appropriate number of bits and mask it with the computed mask
|
appropriate number of bits and mask it with the computed mask
|
||||||
(in case this was a signed field). If we changed it, make a new one. */
|
(in case this was a signed field). If we changed it, make a new one. */
|
||||||
lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
|
lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
|
||||||
if (lvolatilep)
|
|
||||||
{
|
|
||||||
TREE_SIDE_EFFECTS (lhs) = 1;
|
|
||||||
TREE_THIS_VOLATILE (lhs) = 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
rhs = const_binop (BIT_AND_EXPR,
|
rhs = const_binop (BIT_AND_EXPR,
|
||||||
const_binop (LSHIFT_EXPR,
|
const_binop (LSHIFT_EXPR,
|
||||||
|
|
Loading…
Reference in New Issue