diff --git a/gcc/ChangeLog b/gcc/ChangeLog index d5f8559c1ac..47a62ad29cf 100644 --- a/gcc/ChangeLog +++ b/gcc/ChangeLog @@ -1,3 +1,22 @@ +2007-04-06 Zdenek Dvorak + + * tree-ssa-loop-niter.c (idx_infer_loop_bounds): Add and use + argument "reliable". + (infer_loop_bounds_from_ref, infer_loop_bounds_from_array): + Add argument "reliable". Propagate it through calls. + (infer_loop_bounds_from_undefined): Derive number of iterations + estimates from references in blocks that do not dominate loop latch. + (gcov_type_to_double_int): New function. + (estimate_numbers_of_iterations_loop): Use gcov_type_to_double_int + and expected_loop_iterations_unbounded. + * cfgloopanal.c (expected_loop_iterations_unbounded): New function. + (expected_loop_iterations): Use expected_loop_iterations_unbounded. + * tree-data-ref.c (estimated_loop_iterations): Export. + (get_references_in_stmt): Fix -- do not return addresses of local + objects. + * cfgloop.h (expected_loop_iterations_unbounded, + estimated_loop_iterations): Declare. + 2007-04-06 Andreas Tobler * tree-sra.c (sra_build_elt_assignment): Initialize min/maxshift. diff --git a/gcc/cfgloop.h b/gcc/cfgloop.h index c5c1cf25b42..cbad81d27f0 100644 --- a/gcc/cfgloop.h +++ b/gcc/cfgloop.h @@ -262,11 +262,13 @@ extern void verify_loop_structure (void); /* Loop analysis. */ extern bool just_once_each_iteration_p (const struct loop *, basic_block); +gcov_type expected_loop_iterations_unbounded (const struct loop *); extern unsigned expected_loop_iterations (const struct loop *); extern rtx doloop_condition_get (rtx); void estimate_numbers_of_iterations_loop (struct loop *); HOST_WIDE_INT estimated_loop_iterations_int (struct loop *, bool); +bool estimated_loop_iterations (struct loop *, bool, double_int *); /* Loop manipulation. 
*/ extern bool can_duplicate_loop_p (struct loop *loop); diff --git a/gcc/cfgloopanal.c b/gcc/cfgloopanal.c index 5a9189787ea..4612ab8f1a1 100644 --- a/gcc/cfgloopanal.c +++ b/gcc/cfgloopanal.c @@ -423,11 +423,12 @@ average_num_loop_insns (struct loop *loop) return ninsns; } -/* Returns expected number of LOOP iterations. - Compute upper bound on number of iterations in case they do not fit integer - to help loop peeling heuristics. Use exact counts if at all possible. */ -unsigned -expected_loop_iterations (const struct loop *loop) +/* Returns expected number of iterations of LOOP, according to + measured or guessed profile. No bounding is done on the + value. */ + +gcov_type +expected_loop_iterations_unbounded (const struct loop *loop) { edge e; edge_iterator ei; @@ -450,8 +451,7 @@ expected_loop_iterations (const struct loop *loop) else expected = (count_latch + count_in - 1) / count_in; - /* Avoid overflows. */ - return (expected > REG_BR_PROB_BASE ? REG_BR_PROB_BASE : expected); + return expected; } else { @@ -473,6 +473,16 @@ expected_loop_iterations (const struct loop *loop) } } +/* Returns expected number of LOOP iterations. The returned value is bounded + by REG_BR_PROB_BASE. */ + +unsigned +expected_loop_iterations (const struct loop *loop) +{ + gcov_type expected = expected_loop_iterations_unbounded (loop); + return (expected > REG_BR_PROB_BASE ? REG_BR_PROB_BASE : expected); +} + /* Returns the maximum level of nesting of subloops of LOOP. */ unsigned diff --git a/gcc/tree-data-ref.c b/gcc/tree-data-ref.c index d8a291d6b12..3f24c6a5147 100644 --- a/gcc/tree-data-ref.c +++ b/gcc/tree-data-ref.c @@ -2553,7 +2553,7 @@ analyze_ziv_subscript (tree chrec_a, large as the number of iterations. If we have no reliable estimate, the function returns false, otherwise returns true. 
*/ -static bool +bool estimated_loop_iterations (struct loop *loop, bool conservative, double_int *nit) { @@ -4873,8 +4873,7 @@ get_references_in_stmt (tree stmt, VEC (data_ref_loc, heap) **references) { bool clobbers_memory = false; data_ref_loc *ref; - tree *op0, *op1, arg, call; - call_expr_arg_iterator iter; + tree *op0, *op1, call; *references = NULL; @@ -4915,9 +4914,12 @@ get_references_in_stmt (tree stmt, VEC (data_ref_loc, heap) **references) if (call) { - FOR_EACH_CALL_EXPR_ARG (arg, iter, call) + unsigned i, n = call_expr_nargs (call); + + for (i = 0; i < n; i++) { - op0 = &arg; + op0 = &CALL_EXPR_ARG (call, i); + if (DECL_P (*op0) || REFERENCE_CLASS_P (*op0)) { diff --git a/gcc/tree-ssa-loop-niter.c b/gcc/tree-ssa-loop-niter.c index 909f5fcd30a..362be0699d3 100644 --- a/gcc/tree-ssa-loop-niter.c +++ b/gcc/tree-ssa-loop-niter.c @@ -2581,12 +2581,15 @@ array_at_struct_end_p (tree ref) } /* Determine information about number of iterations a LOOP from the index - IDX of a data reference accessed in STMT. Callback for for_each_index. */ + IDX of a data reference accessed in STMT. RELIABLE is true if STMT is + guaranteed to be executed in every iteration of LOOP. Callback for + for_each_index. */ struct ilb_data { struct loop *loop; tree stmt; + bool reliable; }; static bool @@ -2595,7 +2598,7 @@ idx_infer_loop_bounds (tree base, tree *idx, void *dta) struct ilb_data *data = dta; tree ev, init, step; tree low, high, type, next; - bool sign, upper = true; + bool sign, upper = data->reliable, at_end = false; struct loop *loop = data->loop; if (TREE_CODE (base) != ARRAY_REF) @@ -2605,7 +2608,10 @@ idx_infer_loop_bounds (tree base, tree *idx, void *dta) do not really extend over their declared size. However, for arrays of size greater than one, this is unlikely to be intended. 
*/ if (array_at_struct_end_p (base)) - upper = false; + { + at_end = true; + upper = false; + } ev = instantiate_parameters (loop, analyze_scalar_evolution (loop, *idx)); init = initial_condition (ev); step = evolution_part_in_loop_num (ev, loop->num); @@ -2633,7 +2639,7 @@ idx_infer_loop_bounds (tree base, tree *idx, void *dta) /* The array of length 1 at the end of a structure most likely extends beyond its bounds. */ - if (!upper + if (at_end && operand_equal_p (low, high, 0)) return true; @@ -2665,23 +2671,27 @@ idx_infer_loop_bounds (tree base, tree *idx, void *dta) } /* Determine information about number of iterations a LOOP from the bounds - of arrays in the data reference REF accessed in STMT. */ + of arrays in the data reference REF accessed in STMT. RELIABLE is true if + STMT is guaranteed to be executed in every iteration of LOOP. */ static void -infer_loop_bounds_from_ref (struct loop *loop, tree stmt, tree ref) +infer_loop_bounds_from_ref (struct loop *loop, tree stmt, tree ref, + bool reliable) { struct ilb_data data; data.loop = loop; data.stmt = stmt; + data.reliable = reliable; for_each_index (&ref, idx_infer_loop_bounds, &data); } /* Determine information about number of iterations of a LOOP from the way - arrays are used in STMT. */ + arrays are used in STMT. RELIABLE is true if STMT is guaranteed to be + executed in every iteration of LOOP. */ static void -infer_loop_bounds_from_array (struct loop *loop, tree stmt) +infer_loop_bounds_from_array (struct loop *loop, tree stmt, bool reliable) { tree call; @@ -2693,10 +2703,10 @@ infer_loop_bounds_from_array (struct loop *loop, tree stmt) /* For each memory access, analyze its access function and record a bound on the loop iteration domain. 
*/ if (REFERENCE_CLASS_P (op0)) - infer_loop_bounds_from_ref (loop, stmt, op0); + infer_loop_bounds_from_ref (loop, stmt, op0, reliable); if (REFERENCE_CLASS_P (op1)) - infer_loop_bounds_from_ref (loop, stmt, op1); + infer_loop_bounds_from_ref (loop, stmt, op1, reliable); } @@ -2708,7 +2718,7 @@ infer_loop_bounds_from_array (struct loop *loop, tree stmt) FOR_EACH_CALL_EXPR_ARG (arg, iter, call) if (REFERENCE_CLASS_P (arg)) - infer_loop_bounds_from_ref (loop, stmt, arg); + infer_loop_bounds_from_ref (loop, stmt, arg, reliable); } } @@ -2768,6 +2778,7 @@ infer_loop_bounds_from_undefined (struct loop *loop) basic_block *bbs; block_stmt_iterator bsi; basic_block bb; + bool reliable; bbs = get_loop_body (loop); @@ -2776,16 +2787,18 @@ infer_loop_bounds_from_undefined (struct loop *loop) bb = bbs[i]; /* If BB is not executed in each iteration of the loop, we cannot - use it to infer any information about # of iterations of the loop. */ - if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb)) - continue; + use the operations in it to infer reliable upper bound on the + # of iterations of the loop. However, we can use it as a guess. */ + reliable = dominated_by_p (CDI_DOMINATORS, loop->latch, bb); for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) { tree stmt = bsi_stmt (bsi); - infer_loop_bounds_from_array (loop, stmt); - infer_loop_bounds_from_signedness (loop, stmt); + infer_loop_bounds_from_array (loop, stmt, reliable); + + if (reliable) + infer_loop_bounds_from_signedness (loop, stmt); } } @@ -2793,6 +2806,23 @@ infer_loop_bounds_from_undefined (struct loop *loop) free (bbs); } +/* Converts VAL to double_int. */ + +static double_int +gcov_type_to_double_int (gcov_type val) +{ + double_int ret; + + ret.low = (unsigned HOST_WIDE_INT) val; + /* If HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_WIDEST_INT, avoid shifting by + the size of type. 
*/ + val >>= HOST_BITS_PER_WIDE_INT - 1; + val >>= 1; + ret.high = (unsigned HOST_WIDE_INT) val; + + return ret; +} + /* Records estimates on numbers of iterations of LOOP. */ void @@ -2836,7 +2866,8 @@ estimate_numbers_of_iterations_loop (struct loop *loop) iterations. */ if (loop->header->count != 0) { - bound = uhwi_to_double_int (expected_loop_iterations (loop) + 1); + gcov_type nit = expected_loop_iterations_unbounded (loop) + 1; + bound = gcov_type_to_double_int (nit); record_niter_bound (loop, bound, true, false); }