Avoid signed left shift undefined behaviour in sext_hwi
gcc/
	PR other/67042
	* hwint.h (sext_hwi): Switch to unsigned for the left shift, and
	conditionalize the whole on __GNUC__.  Add fallback code depending
	neither on undefined nor implementation-defined behaviour.

From-SVN: r227008
This commit is contained in:
parent
dce573c43f
commit
d63c864834
@@ -1,9 +1,16 @@
|
|||||||
|
2015-08-19 Mikael Morin <mikael@gcc.gnu.org>
|
||||||
|
|
||||||
|
PR other/67042
|
||||||
|
* hwint.h (sext_hwi): Switch to unsigned for the left shift, and
|
||||||
|
conditionalize the whole on __GNUC__. Add fallback code
|
||||||
|
depending neither on undefined nor implementation-defined behaviour.
|
||||||
|
|
||||||
2015-08-19 Jiong Wang <jiong.wang@arm.com>
|
2015-08-19 Jiong Wang <jiong.wang@arm.com>
|
||||||
|
|
||||||
* config/aarch64/aarch64.c (aarch64_load_symref_appropriately): Replace
|
* config/aarch64/aarch64.c (aarch64_load_symref_appropriately): Replace
|
||||||
whitespaces with tab.
|
whitespaces with tab.
|
||||||
|
|
||||||
2015-08-13 Florian Weimer <fweimer@redhat.com>
|
2015-08-19 Florian Weimer <fweimer@redhat.com>
|
||||||
|
|
||||||
* prj.adb (For_Every_Project_Imported_Context.Recursive_Check_Context):
|
* prj.adb (For_Every_Project_Imported_Context.Recursive_Check_Context):
|
||||||
Move Name_Id_Set instantiation to the Prj package, to avoid trampolines.
|
Move Name_Id_Set instantiation to the Prj package, to avoid trampolines.
|
||||||
|
18
gcc/hwint.h
18
gcc/hwint.h
@@ -244,11 +244,27 @@ sext_hwi (HOST_WIDE_INT src, unsigned int prec)
|
|||||||
if (prec == HOST_BITS_PER_WIDE_INT)
|
if (prec == HOST_BITS_PER_WIDE_INT)
|
||||||
return src;
|
return src;
|
||||||
else
|
else
|
||||||
|
#if defined (__GNUC__)
|
||||||
{
|
{
|
||||||
|
/* Take the faster path if the implementation-defined bits it's relying
|
||||||
|
on are implemented the way we expect them to be. Namely, conversion
|
||||||
|
from unsigned to signed preserves bit pattern, and right shift of
|
||||||
|
a signed value propagates the sign bit.
|
||||||
|
We have to convert from signed to unsigned and back, because when left
|
||||||
|
shifting signed values, any overflow is undefined behaviour. */
|
||||||
gcc_checking_assert (prec < HOST_BITS_PER_WIDE_INT);
|
gcc_checking_assert (prec < HOST_BITS_PER_WIDE_INT);
|
||||||
int shift = HOST_BITS_PER_WIDE_INT - prec;
|
int shift = HOST_BITS_PER_WIDE_INT - prec;
|
||||||
return (src << shift) >> shift;
|
return ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) src << shift)) >> shift;
|
||||||
}
|
}
|
||||||
|
#else
|
||||||
|
{
|
||||||
|
/* Fall back to the slower, well defined path otherwise. */
|
||||||
|
gcc_checking_assert (prec < HOST_BITS_PER_WIDE_INT);
|
||||||
|
HOST_WIDE_INT sign_mask = HOST_WIDE_INT_1 << (prec - 1);
|
||||||
|
HOST_WIDE_INT value_mask = (HOST_WIDE_INT_1U << prec) - HOST_WIDE_INT_1U;
|
||||||
|
return (((src & value_mask) ^ sign_mask) - sign_mask);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
/* Zero extend SRC starting from PREC. */
|
/* Zero extend SRC starting from PREC. */
|
||||||
|
Loading…
Reference in New Issue
Block a user