diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 81e5b4fdee4..19049bf3f52 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2013-11-08  Richard Biener  <rguenther@suse.de>
+
+	PR tree-optimization/59047
+	* tree-predcom.c (ref_at_iteration): Handle bitfield accesses
+	properly.
+
 2013-11-08  Ilya Enkovich  <ilya.enkovich@intel.com>
 
 	* common.opt (fcheck-pointer-bounds): Move to ...
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index e83d64e4dbd..4fc3066b528 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,14 @@
+2013-11-08  Richard Biener  <rguenther@suse.de>
+
+	PR tree-optimization/59047
+	* gcc.dg/torture/pr59047.c: New testcase.
+
+2013-11-08  Richard Biener  <rguenther@suse.de>
+
+	PR tree-optimization/59038
+	PR tree-optimization/58955
+	* gcc.dg/torture/pr59038.c: New testcase.
+
 2013-11-07  Janus Weil  <janus@gcc.gnu.org>
 
 	PR fortran/58471
diff --git a/gcc/testsuite/gcc.dg/torture/pr59047.c b/gcc/testsuite/gcc.dg/torture/pr59047.c
new file mode 100644
index 00000000000..fcedfcba870
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr59047.c
@@ -0,0 +1,39 @@
+/* { dg-do run } */
+
+extern void abort (void);
+
+struct
+{
+  int f0;
+  int f1:1;
+  int f2:2;
+} a = {0, 0, 1};
+
+int b, c, *d, e, f;
+
+int
+fn1 ()
+{
+  for (; b < 1; ++b)
+    {
+      for (e = 0; e < 1; e = 1)
+	{
+	  int **g = &d;
+	  *g = &c;
+	}
+      *d = 0;
+      f = a.f1;
+      if (f)
+	return 0;
+    }
+  return 0;
+}
+
+int
+main ()
+{
+  fn1 ();
+  if (b != 1)
+    abort ();
+  return 0;
+}
diff --git a/gcc/tree-predcom.c b/gcc/tree-predcom.c
index 3358f8b9331..6084cf62ef6 100644
--- a/gcc/tree-predcom.c
+++ b/gcc/tree-predcom.c
@@ -1353,10 +1353,24 @@ ref_at_iteration (data_reference_p dr, int iter, gimple_seq *stmts)
   tree addr = fold_build_pointer_plus (DR_BASE_ADDRESS (dr), off);
   addr = force_gimple_operand_1 (addr, stmts, is_gimple_mem_ref_addr,
 				 NULL_TREE);
-  return fold_build2 (MEM_REF, TREE_TYPE (DR_REF (dr)),
-		      addr,
-		      fold_convert (reference_alias_ptr_type (DR_REF (dr)),
-				    coff));
+  tree alias_ptr = fold_convert (reference_alias_ptr_type (DR_REF (dr)), coff);
+  /* While data-ref analysis punts on bit offsets it still handles
+     bitfield accesses at byte boundaries.  Cope with that.  Note that
+     we cannot simply re-apply the outer COMPONENT_REF because the
+     byte-granular portion of it is already applied via DR_INIT and
+     DR_OFFSET, so simply build a BIT_FIELD_REF knowing that the bits
+     start at offset zero.  */
+  if (TREE_CODE (DR_REF (dr)) == COMPONENT_REF
+      && DECL_BIT_FIELD (TREE_OPERAND (DR_REF (dr), 1)))
+    {
+      tree field = TREE_OPERAND (DR_REF (dr), 1);
+      return build3 (BIT_FIELD_REF, TREE_TYPE (DR_REF (dr)),
+		     build2 (MEM_REF, DECL_BIT_FIELD_TYPE (field),
+			     addr, alias_ptr),
+		     DECL_SIZE (field), bitsize_zero_node);
+    }
+  else
+    return fold_build2 (MEM_REF, TREE_TYPE (DR_REF (dr)), addr, alias_ptr);
 }
 
 /* Get the initialization expression for the INDEX-th temporary variable