diff --git a/gcc/ChangeLog b/gcc/ChangeLog index e799c942b5b..10a3626dedb 100644 --- a/gcc/ChangeLog +++ b/gcc/ChangeLog @@ -1,3 +1,10 @@ +2010-01-22 Richard Henderson + + PR tree-opt/42833 + * tree-sra.c (sra_modify_assign): Delay re-gimplification of + the RHS until after generate_subtree_copies has inserted its + code before the current statement. + 2010-01-22 Joern Rennecke * doc/tm.texi (TARGET_MIN_DIVISIONS_FOR_RECIP_MUL): Fix return type. diff --git a/gcc/testsuite/gcc.c-torture/execute/pr42833.c b/gcc/testsuite/gcc.c-torture/execute/pr42833.c new file mode 100644 index 00000000000..5494f98686e --- /dev/null +++ b/gcc/testsuite/gcc.c-torture/execute/pr42833.c @@ -0,0 +1,171 @@ +typedef __INT_LEAST8_TYPE__ int8_t; +typedef __UINT_LEAST32_TYPE__ uint32_t; +typedef int ssize_t; +typedef struct { int8_t v1; int8_t v2; int8_t v3; int8_t v4; } neon_s8; + +uint32_t helper_neon_rshl_s8 (uint32_t arg1, uint32_t arg2); + +uint32_t +helper_neon_rshl_s8 (uint32_t arg1, uint32_t arg2) +{ + uint32_t res; + neon_s8 vsrc1; + neon_s8 vsrc2; + neon_s8 vdest; + do + { + union + { + neon_s8 v; + uint32_t i; + } conv_u; + conv_u.i = (arg1); + vsrc1 = conv_u.v; + } + while (0); + do + { + union + { + neon_s8 v; + uint32_t i; + } conv_u; + conv_u.i = (arg2); + vsrc2 = conv_u.v; + } + while (0); + do + { + int8_t tmp; + tmp = (int8_t) vsrc2.v1; + if (tmp >= (ssize_t) sizeof (vsrc1.v1) * 8) + { + vdest.v1 = 0; + } + else if (tmp < -(ssize_t) sizeof (vsrc1.v1) * 8) + { + vdest.v1 = vsrc1.v1 >> (sizeof (vsrc1.v1) * 8 - 1); + } + else if (tmp == -(ssize_t) sizeof (vsrc1.v1) * 8) + { + vdest.v1 = vsrc1.v1 >> (tmp - 1); + vdest.v1++; + vdest.v1 >>= 1; + } + else if (tmp < 0) + { + vdest.v1 = (vsrc1.v1 + (1 << (-1 - tmp))) >> -tmp; + } + else + { + vdest.v1 = vsrc1.v1 << tmp; + } + } + while (0); + do + { + int8_t tmp; + tmp = (int8_t) vsrc2.v2; + if (tmp >= (ssize_t) sizeof (vsrc1.v2) * 8) + { + vdest.v2 = 0; + } + else if (tmp < -(ssize_t) sizeof (vsrc1.v2) * 8) + { + vdest.v2 = 
vsrc1.v2 >> (sizeof (vsrc1.v2) * 8 - 1); + } + else if (tmp == -(ssize_t) sizeof (vsrc1.v2) * 8) + { + vdest.v2 = vsrc1.v2 >> (tmp - 1); + vdest.v2++; + vdest.v2 >>= 1; + } + else if (tmp < 0) + { + vdest.v2 = (vsrc1.v2 + (1 << (-1 - tmp))) >> -tmp; + } + else + { + vdest.v2 = vsrc1.v2 << tmp; + } + } + while (0); + do + { + int8_t tmp; + tmp = (int8_t) vsrc2.v3; + if (tmp >= (ssize_t) sizeof (vsrc1.v3) * 8) + { + vdest.v3 = 0; + } + else if (tmp < -(ssize_t) sizeof (vsrc1.v3) * 8) + { + vdest.v3 = vsrc1.v3 >> (sizeof (vsrc1.v3) * 8 - 1); + } + else if (tmp == -(ssize_t) sizeof (vsrc1.v3) * 8) + { + vdest.v3 = vsrc1.v3 >> (tmp - 1); + vdest.v3++; + vdest.v3 >>= 1; + } + else if (tmp < 0) + { + vdest.v3 = (vsrc1.v3 + (1 << (-1 - tmp))) >> -tmp; + } + else + { + vdest.v3 = vsrc1.v3 << tmp; + } + } + while (0); + do + { + int8_t tmp; + tmp = (int8_t) vsrc2.v4; + if (tmp >= (ssize_t) sizeof (vsrc1.v4) * 8) + { + vdest.v4 = 0; + } + else if (tmp < -(ssize_t) sizeof (vsrc1.v4) * 8) + { + vdest.v4 = vsrc1.v4 >> (sizeof (vsrc1.v4) * 8 - 1); + } + else if (tmp == -(ssize_t) sizeof (vsrc1.v4) * 8) + { + vdest.v4 = vsrc1.v4 >> (tmp - 1); + vdest.v4++; + vdest.v4 >>= 1; + } + else if (tmp < 0) + { + vdest.v4 = (vsrc1.v4 + (1 << (-1 - tmp))) >> -tmp; + } + else + { + vdest.v4 = vsrc1.v4 << tmp; + } + } + while (0);; + do + { + union + { + neon_s8 v; + uint32_t i; + } conv_u; + conv_u.v = (vdest); + res = conv_u.i; + } + while (0); + return res; +} + +extern void abort(void); + +int main() +{ + uint32_t r = helper_neon_rshl_s8 (0x05050505, 0x01010101); + if (r != 0x0a0a0a0a) + abort (); + return 0; +} diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c index 07658bddae9..79c5c76c5e1 100644 --- a/gcc/tree-sra.c +++ b/gcc/tree-sra.c @@ -2533,6 +2533,7 @@ sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi, bool modify_this_stmt = false; bool force_gimple_rhs = false; location_t loc = gimple_location (*stmt); + gimple_stmt_iterator orig_gsi = *gsi; if (!gimple_assign_single_p 
(*stmt)) return SRA_SA_NONE; @@ -2611,15 +2612,6 @@ sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi, force_gimple_rhs = true; } } - - if (force_gimple_rhs) - rhs = force_gimple_operand_gsi (gsi, rhs, true, NULL_TREE, - true, GSI_SAME_STMT); - if (gimple_assign_rhs1 (*stmt) != rhs) - { - gimple_assign_set_rhs_from_tree (gsi, rhs); - gcc_assert (*stmt == gsi_stmt (*gsi)); - } } /* From this point on, the function deals with assignments in between @@ -2721,6 +2713,18 @@ sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi, 0, 0, gsi, true, true); } } + + /* This gimplification must be done after generate_subtree_copies, lest we + insert the subtree copies in the middle of the gimplified sequence. */ + if (force_gimple_rhs) + rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE, + true, GSI_SAME_STMT); + if (gimple_assign_rhs1 (*stmt) != rhs) + { + gimple_assign_set_rhs_from_tree (&orig_gsi, rhs); + gcc_assert (*stmt == gsi_stmt (orig_gsi)); + } + return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE; }