Makefile.in (value-prof.o): New dependencies on $(DIAGNOSTIC_H) $(TREE_H) and $(COVERAGE_H).

* Makefile.in (value-prof.o): New dependencies on $(DIAGNOSTIC_H)
	$(TREE_H) and $(COVERAGE_H).
	* coverage.c (compute_checksum): Use DECL_NAME not DECL_ASSEMBLER_NAME.
	* opts.c (common_handle_option): Enable tree-based value transforms.
	* toplev.c (process_options): Ditto.
	* value-prof.h (struct histogram_value_t): Redefine. "Adjust" below
	refers to references to this type.
	* tree-flow.h: (struct stmt_ann_d): Add histograms field.
	* rtl-profile.c (rtl_gen_interval_profiler): Adjust. Remove checks
	for may_be_more, may_be_less.
	(rtl_gen_pow2_profiler): Adjust.
	(rtl_gen_one_value_profiler_no_edge_manip): Adjust.
	(rtl_gen_one_value_profiler): Adjust.
	(rtl_gen_const_delta_profiler): Adjust.
	* tree-profile.c (tree_gen_interval_profiler): Implement.
	(tree_gen_pow2_profiler): Ditto.
	(tree_gen_one_value_profiler): Ditto.
	(tree_profiling): New.
	(pass_tree_profile): Reference it.
	* value-prof.c: Include tree-flow.h, tree-flow-inline.h, diagnostic.h,
	tree.h, gcov-io.h.
	(insn_divmod_values_to_profile): Rename to
	rtl_divmod_values_to_profile. Adjust.
	(insn_values_to_profile): Rename to rtl_values_to_profile. Adjust.
	(insn_prefetch_values_to_profile): Adjust.
	(rtl_value_profile_transformations): Adjust.
	(gen_divmod_fixed_value): Rename to rtl_divmod_fixed_value.
	(gen_mod_pow2): Rename to rtl_mod_pow2.
	(gen_mod_subtract): Rename to rtl_mod_subtract.
	(divmod_fixed_value_transform): Rename to
	rtl_divmod_fixed_value_transform.
	(mod_pow2_value_transform): Rename to rtl_mod_pow2_value_transform.
	(mod_subtract_transform): Rename to rtl_mod_subtract_transform.
	(rtl_find_values_to_profile): Adjust.
	(tree_value_profile_transformations): Implement.
	(tree_divmod_values_to_profile): New.
	(tree_values_to_profile): New.
	(tree_divmod_fixed_value): New.
	(tree_mod_pow2): New.
	(tree_mod_subtract): New.
	(tree_divmod_fixed_value_transform): New.
	(tree_mod_pow2_value_transform): New.
	(tree_mod_subtract_transform): New.
	(tree_find_values_to_profile): Implement.
	* profile.c (instrument_values):  Free histograms.
	(compute_value_histograms): Adjust. Implement tree version.

From-SVN: r97156
This commit is contained in:
Dale Johannesen 2005-03-29 11:45:51 +00:00 committed by Jan Hubicka
parent 82498ed4b9
commit 1f1e85278a
9 changed files with 1435 additions and 232 deletions

View File

@ -1,3 +1,52 @@
2005-03-29 Dale Johannesen <dalej@apple.com>
* Makefile.in (value-prof.o): New dependencies on $(DIAGNOSTIC_H)
$(TREE_H) and $(COVERAGE_H).
* coverage.c (compute_checksum): Use DECL_NAME not DECL_ASSEMBLER_NAME.
* opts.c (common_handle_option): Enable tree-based value transforms.
* toplev.c (process_options): Ditto.
* value-prof.h (struct histogram_value_t): Redefine. "Adjust" below
refers to references to this type.
* tree-flow.h: (struct stmt_ann_d): Add histograms field.
* rtl-profile.c (rtl_gen_interval_profiler): Adjust. Remove checks
for may_be_more, may_be_less.
(rtl_gen_pow2_profiler): Adjust.
(rtl_gen_one_value_profiler_no_edge_manip): Adjust.
(rtl_gen_one_value_profiler): Adjust.
(rtl_gen_const_delta_profiler): Adjust.
* tree-profile.c (tree_gen_interval_profiler): Implement.
(tree_gen_pow2_profiler): Ditto.
(tree_gen_one_value_profiler): Ditto.
(tree_profiling): New.
(pass_tree_profile): Reference it.
* value-prof.c: Include tree-flow.h, tree-flow-inline.h, diagnostic.h,
tree.h, gcov-io.h.
(insn_divmod_values_to_profile): Rename to
rtl_divmod_values_to_profile. Adjust.
(insn_values_to_profile): Rename to rtl_values_to_profile. Adjust.
(insn_prefetch_values_to_profile): Adjust.
(rtl_value_profile_transformations): Adjust.
(gen_divmod_fixed_value): Rename to rtl_divmod_fixed_value.
(gen_mod_pow2): Rename to rtl_mod_pow2.
(gen_mod_subtract): Rename to rtl_mod_subtract.
(divmod_fixed_value_transform): Rename to
rtl_divmod_fixed_value_transform.
(mod_pow2_value_transform): Rename to rtl_mod_pow2_value_transform.
(mod_subtract_transform): Rename to rtl_mod_subtract_transform.
(rtl_find_values_to_profile): Adjust.
(tree_value_profile_transformations): Implement.
(tree_divmod_values_to_profile): New.
(tree_values_to_profile): New.
(tree_divmod_fixed_value): New.
(tree_mod_pow2): New.
(tree_mod_subtract): New.
(tree_divmod_fixed_value_transform): New.
(tree_mod_pow2_value_transform): New.
(tree_mod_subtract_transform): New.
(tree_find_values_to_profile): Implement.
* profile.c (instrument_values): Free histograms.
(compute_value_histograms): Adjust. Implement tree version.
2005-03-29 Uros Bizjak <uros@kss-loka.si>
* reg-stack.c (subst_stack_regs_pat): Handle <UNSPEC_FIST> case.

View File

@ -2023,7 +2023,8 @@ rtl-profile.o : tree-profile.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
toplev.h $(BASIC_BLOCK_H) $(COVERAGE_H) $(TREE_FLOW_H) value-prof.h $(GGC_H)
value-prof.o : value-prof.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(BASIC_BLOCK_H) hard-reg-set.h value-prof.h $(EXPR_H) output.h $(FLAGS_H) \
$(RECOG_H) insn-config.h $(OPTABS_H) $(REGS_H) $(GGC_H)
$(RECOG_H) insn-config.h $(OPTABS_H) $(REGS_H) $(GGC_H) $(DIAGNOSTIC_H) \
$(TREE_H) $(COVERAGE_H)
loop.o : loop.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(FLAGS_H) \
insn-config.h $(REGS_H) hard-reg-set.h $(RECOG_H) $(EXPR_H) \
real.h $(PREDICT_H) $(BASIC_BLOCK_H) function.h $(CFGLOOP_H) \

View File

@ -224,6 +224,7 @@ instrument_values (histogram_values values)
abort ();
}
}
VEC_free (histogram_value, values);
}
@ -641,7 +642,7 @@ compute_branch_probabilities (void)
}
/* Load value histograms values whose description is stored in VALUES array
from .da file. */
from .gcda file. */
static void
compute_value_histograms (histogram_values values)
@ -688,21 +689,32 @@ compute_value_histograms (histogram_values values)
hist = VEC_index (histogram_value, values, i);
t = (int) hist->type;
/* FIXME: make this work for trees. */
aact_count = act_count[t];
act_count[t] += hist->n_counters;
if (!ir_type ())
{
aact_count = act_count[t];
act_count[t] += hist->n_counters;
for (j = hist->n_counters; j > 0; j--)
hist_list = alloc_EXPR_LIST (0, GEN_INT (aact_count[j - 1]),
hist_list);
hist_list = alloc_EXPR_LIST (0,
copy_rtx ((rtx) hist->value), hist_list);
hist_list = alloc_EXPR_LIST (0,
copy_rtx (hist->hvalue.rtl.value), hist_list);
hist_list = alloc_EXPR_LIST (0, GEN_INT (hist->type), hist_list);
REG_NOTES ((rtx) hist->insn) =
alloc_EXPR_LIST (REG_VALUE_PROFILE, hist_list,
REG_NOTES ((rtx) hist->insn));
REG_NOTES (hist->hvalue.rtl.insn) =
alloc_EXPR_LIST (REG_VALUE_PROFILE, hist_list,
REG_NOTES (hist->hvalue.rtl.insn));
}
else
{
tree stmt = hist->hvalue.tree.stmt;
stmt_ann_t ann = get_stmt_ann (stmt);
hist->hvalue.tree.next = ann->histograms;
ann->histograms = hist;
hist->hvalue.tree.counters =
xmalloc (sizeof (gcov_type) * hist->n_counters);
for (j = 0; j < hist->n_counters; j++)
hist->hvalue.tree.counters[j] = aact_count[j];
}
}
for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)

View File

@ -23,30 +23,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA. */
/* Generate basic block profile instrumentation and auxiliary files.
Profile generation is optimized, so that not all arcs in the basic
block graph need instrumenting. First, the BB graph is closed with
one entry (function start), and one exit (function exit). Any
ABNORMAL_EDGE cannot be instrumented (because there is no control
path to place the code). We close the graph by inserting fake
EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
edges that do not go to the exit_block. We ignore such abnormal
edges. Naturally these fake edges are never directly traversed,
and so *cannot* be directly instrumented. Some other graph
massaging is done. To optimize the instrumentation we generate the
BB minimal span tree, only edges that are not on the span tree
(plus the entry point) need instrumenting. From that information
all other edge counts can be deduced. By construction all fake
edges must be on the spanning tree. We also attempt to place
EDGE_CRITICAL edges on the spanning tree.
The auxiliary file generated is <dumpbase>.bbg. The format is
described in full in gcov-io.h. */
/* ??? Register allocation should use basic block execution counts to
give preference to the most commonly executed blocks. */
/* ??? Should calculate branch probabilities before instrumenting code, since
then we can use arc counts to help decide which arcs to instrument. */
RTL-based version. See profile.c for overview. */
#include "config.h"
#include "system.h"
@ -114,33 +91,33 @@ rtl_gen_interval_profiler (histogram_value value, unsigned tag, unsigned base)
rtx less_label = gen_label_rtx ();
rtx end_of_code_label = gen_label_rtx ();
int per_counter = gcov_size / BITS_PER_UNIT;
edge e = split_block (BLOCK_FOR_INSN ((rtx)value->insn),
PREV_INSN ((rtx)value->insn));
edge e = split_block (BLOCK_FOR_INSN (value->hvalue.rtl.insn),
PREV_INSN (value->hvalue.rtl.insn));
start_sequence ();
if (value->seq)
emit_insn (value->seq);
if (value->hvalue.rtl.seq)
emit_insn (value->hvalue.rtl.seq);
mr = gen_reg_rtx (Pmode);
tmp = rtl_coverage_counter_ref (tag, base);
tmp = force_reg (Pmode, XEXP (tmp, 0));
val = expand_simple_binop (value->mode, MINUS,
copy_rtx (value->value),
val = expand_simple_binop (value->hvalue.rtl.mode, MINUS,
copy_rtx (value->hvalue.rtl.value),
GEN_INT (value->hdata.intvl.int_start),
NULL_RTX, 0, OPTAB_WIDEN);
if (value->hdata.intvl.may_be_more)
do_compare_rtx_and_jump (copy_rtx (val), GEN_INT (value->hdata.intvl.steps),
GE, 0, value->mode, NULL_RTX, NULL_RTX, more_label);
if (value->hdata.intvl.may_be_less)
do_compare_rtx_and_jump (copy_rtx (val), const0_rtx, LT, 0, value->mode,
GE, 0, value->hvalue.rtl.mode, NULL_RTX, NULL_RTX,
more_label);
do_compare_rtx_and_jump (copy_rtx (val), const0_rtx, LT, 0,
value->hvalue.rtl.mode,
NULL_RTX, NULL_RTX, less_label);
/* We are in range. */
tmp1 = expand_simple_binop (value->mode, MULT,
tmp1 = expand_simple_binop (value->hvalue.rtl.mode, MULT,
copy_rtx (val), GEN_INT (per_counter),
NULL_RTX, 0, OPTAB_WIDEN);
tmp1 = expand_simple_binop (Pmode, PLUS, copy_rtx (tmp), tmp1, mr,
@ -148,43 +125,27 @@ rtl_gen_interval_profiler (histogram_value value, unsigned tag, unsigned base)
if (tmp1 != mr)
emit_move_insn (copy_rtx (mr), tmp1);
if (value->hdata.intvl.may_be_more
|| value->hdata.intvl.may_be_less)
{
emit_jump_insn (gen_jump (end_of_code_label));
emit_barrier ();
}
/* Above the interval. */
if (value->hdata.intvl.may_be_more)
{
emit_label (more_label);
tmp1 = expand_simple_binop (Pmode, PLUS, copy_rtx (tmp),
GEN_INT (per_counter * value->hdata.intvl.steps),
mr, 0, OPTAB_WIDEN);
if (tmp1 != mr)
emit_move_insn (copy_rtx (mr), tmp1);
if (value->hdata.intvl.may_be_less)
{
emit_jump_insn (gen_jump (end_of_code_label));
emit_barrier ();
}
}
/* Below the interval. */
if (value->hdata.intvl.may_be_less)
{
emit_label (less_label);
tmp1 = expand_simple_binop (Pmode, PLUS, copy_rtx (tmp),
GEN_INT (per_counter * (value->hdata.intvl.steps
+ (value->hdata.intvl.may_be_more ? 1 : 0))),
GEN_INT (per_counter * (value->hdata.intvl.steps +1)),
mr, 0, OPTAB_WIDEN);
if (tmp1 != mr)
emit_move_insn (copy_rtx (mr), tmp1);
}
if (value->hdata.intvl.may_be_more
|| value->hdata.intvl.may_be_less)
emit_label (end_of_code_label);
mem_ref = validize_mem (gen_rtx_MEM (mode, mr));
@ -215,32 +176,32 @@ rtl_gen_pow2_profiler (histogram_value value, unsigned tag, unsigned base)
rtx end_of_code_label = gen_label_rtx ();
rtx loop_label = gen_label_rtx ();
int per_counter = gcov_size / BITS_PER_UNIT;
edge e = split_block (BLOCK_FOR_INSN ((rtx)value->insn),
PREV_INSN ((rtx)value->insn));
edge e = split_block (BLOCK_FOR_INSN (value->hvalue.rtl.insn),
PREV_INSN (value->hvalue.rtl.insn));
start_sequence ();
if (value->seq)
emit_insn (value->seq);
if (value->hvalue.rtl.seq)
emit_insn (value->hvalue.rtl.seq);
mr = gen_reg_rtx (Pmode);
tmp = rtl_coverage_counter_ref (tag, base);
tmp = force_reg (Pmode, XEXP (tmp, 0));
emit_move_insn (mr, tmp);
uval = gen_reg_rtx (value->mode);
emit_move_insn (uval, copy_rtx (value->value));
uval = gen_reg_rtx (value->hvalue.rtl.mode);
emit_move_insn (uval, copy_rtx (value->hvalue.rtl.value));
/* Check for non-power of 2. */
if (value->hdata.pow2.may_be_other)
{
do_compare_rtx_and_jump (copy_rtx (uval), const0_rtx, LE, 0, value->mode,
do_compare_rtx_and_jump (copy_rtx (uval), const0_rtx, LE, 0, value->hvalue.rtl.mode,
NULL_RTX, NULL_RTX, end_of_code_label);
tmp = expand_simple_binop (value->mode, PLUS, copy_rtx (uval),
tmp = expand_simple_binop (value->hvalue.rtl.mode, PLUS, copy_rtx (uval),
constm1_rtx, NULL_RTX, 0, OPTAB_WIDEN);
tmp = expand_simple_binop (value->mode, AND, copy_rtx (uval), tmp,
tmp = expand_simple_binop (value->hvalue.rtl.mode, AND, copy_rtx (uval), tmp,
NULL_RTX, 0, OPTAB_WIDEN);
do_compare_rtx_and_jump (tmp, const0_rtx, NE, 0, value->mode, NULL_RTX,
do_compare_rtx_and_jump (tmp, const0_rtx, NE, 0, value->hvalue.rtl.mode, NULL_RTX,
NULL_RTX, end_of_code_label);
}
@ -251,12 +212,12 @@ rtl_gen_pow2_profiler (histogram_value value, unsigned tag, unsigned base)
if (tmp != mr)
emit_move_insn (copy_rtx (mr), tmp);
tmp = expand_simple_binop (value->mode, ASHIFTRT, copy_rtx (uval), const1_rtx,
tmp = expand_simple_binop (value->hvalue.rtl.mode, ASHIFTRT, copy_rtx (uval), const1_rtx,
uval, 0, OPTAB_WIDEN);
if (tmp != uval)
emit_move_insn (copy_rtx (uval), tmp);
do_compare_rtx_and_jump (copy_rtx (uval), const0_rtx, NE, 0, value->mode,
do_compare_rtx_and_jump (copy_rtx (uval), const0_rtx, NE, 0, value->hvalue.rtl.mode,
NULL_RTX, NULL_RTX, loop_label);
/* Increase the counter. */
@ -295,8 +256,8 @@ rtl_gen_one_value_profiler_no_edge_manipulation (histogram_value value,
start_sequence ();
if (value->seq)
emit_insn (value->seq);
if (value->hvalue.rtl.seq)
emit_insn (value->hvalue.rtl.seq);
stored_value_ref = rtl_coverage_counter_ref (tag, base);
counter_ref = rtl_coverage_counter_ref (tag, base + 1);
@ -306,7 +267,7 @@ rtl_gen_one_value_profiler_no_edge_manipulation (histogram_value value,
all = validize_mem (all_ref);
uval = gen_reg_rtx (mode);
convert_move (uval, copy_rtx (value->value), 0);
convert_move (uval, copy_rtx (value->hvalue.rtl.value), 0);
/* Check if the stored value matches. */
do_compare_rtx_and_jump (copy_rtx (uval), copy_rtx (stored_value), EQ,
@ -362,8 +323,8 @@ rtl_gen_one_value_profiler_no_edge_manipulation (histogram_value value,
static void
rtl_gen_one_value_profiler (histogram_value value, unsigned tag, unsigned base)
{
edge e = split_block (BLOCK_FOR_INSN ((rtx)value->insn),
PREV_INSN ((rtx)value->insn));
edge e = split_block (BLOCK_FOR_INSN (value->hvalue.rtl.insn),
PREV_INSN (value->hvalue.rtl.insn));
rtx sequence = rtl_gen_one_value_profiler_no_edge_manipulation (value,
tag, base);
rebuild_jump_labels (sequence);
@ -383,28 +344,28 @@ rtl_gen_const_delta_profiler (histogram_value value, unsigned tag, unsigned base
enum machine_mode mode = mode_for_size (gcov_size, MODE_INT, 0);
rtx stored_value_ref, stored_value, tmp, uval;
rtx sequence;
edge e = split_block (BLOCK_FOR_INSN ((rtx)value->insn),
PREV_INSN ((rtx)value->insn));
edge e = split_block (BLOCK_FOR_INSN (value->hvalue.rtl.insn),
PREV_INSN (value->hvalue.rtl.insn));
start_sequence ();
if (value->seq)
emit_insn (value->seq);
if (value->hvalue.rtl.seq)
emit_insn (value->hvalue.rtl.seq);
stored_value_ref = rtl_coverage_counter_ref (tag, base);
stored_value = validize_mem (stored_value_ref);
uval = gen_reg_rtx (mode);
convert_move (uval, copy_rtx (value->value), 0);
convert_move (uval, copy_rtx (value->hvalue.rtl.value), 0);
tmp = expand_simple_binop (mode, MINUS,
copy_rtx (uval), copy_rtx (stored_value),
NULL_RTX, 0, OPTAB_WIDEN);
one_value_delta = ggc_alloc (sizeof (*one_value_delta));
one_value_delta->value = tmp;
one_value_delta->mode = mode;
one_value_delta->seq = NULL_RTX;
one_value_delta->insn = value->insn;
one_value_delta->hvalue.rtl.value = tmp;
one_value_delta->hvalue.rtl.mode = mode;
one_value_delta->hvalue.rtl.seq = NULL_RTX;
one_value_delta->hvalue.rtl.insn = value->hvalue.rtl.insn;
one_value_delta->type = HIST_TYPE_SINGLE_VALUE;
emit_insn (rtl_gen_one_value_profiler_no_edge_manipulation (one_value_delta,
tag, base + 1));

View File

@ -1773,9 +1773,6 @@ process_options (void)
warning ("this target machine does not have delayed branches");
#endif
if (flag_tree_based_profiling && flag_profile_values)
sorry ("value-based profiling not yet implemented in trees.");
user_label_prefix = USER_LABEL_PREFIX;
if (flag_leading_underscore != -1)
{

View File

@ -309,6 +309,12 @@ struct stmt_ann_d GTY(())
by each pass on an as-needed basis in any order convenient for the
pass which needs statement UIDs. */
unsigned int uid;
/* Linked list of histograms for value-based profiling. This is really a
struct histogram_value*. We use void* to avoid having to export that
everywhere, and to avoid having to put it in GC memory. */
void * GTY ((skip (""))) histograms;
};
union tree_ann_d GTY((desc ("ann_type ((tree_ann_t)&%h)")))

View File

@ -24,30 +24,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA. */
/* Generate basic block profile instrumentation and auxiliary files.
Profile generation is optimized, so that not all arcs in the basic
block graph need instrumenting. First, the BB graph is closed with
one entry (function start), and one exit (function exit). Any
ABNORMAL_EDGE cannot be instrumented (because there is no control
path to place the code). We close the graph by inserting fake
EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
edges that do not go to the exit_block. We ignore such abnormal
edges. Naturally these fake edges are never directly traversed,
and so *cannot* be directly instrumented. Some other graph
massaging is done. To optimize the instrumentation we generate the
BB minimal span tree, only edges that are not on the span tree
(plus the entry point) need instrumenting. From that information
all other edge counts can be deduced. By construction all fake
edges must be on the spanning tree. We also attempt to place
EDGE_CRITICAL edges on the spanning tree.
The auxiliary file generated is <dumpbase>.bbg. The format is
described in full in gcov-io.h. */
/* ??? Register allocation should use basic block execution counts to
give preference to the most commonly executed blocks. */
/* ??? Should calculate branch probabilities before instrumenting code, since
then we can use arc counts to help decide which arcs to instrument. */
Tree-based version. See profile.c for overview. */
#include "config.h"
#include "system.h"
@ -102,15 +79,163 @@ tree_gen_edge_profiler (int edgeno, edge e)
tag of the section for counters, BASE is offset of the counter position. */
static void
tree_gen_interval_profiler (histogram_value value ATTRIBUTE_UNUSED,
unsigned tag ATTRIBUTE_UNUSED,
unsigned base ATTRIBUTE_UNUSED)
tree_gen_interval_profiler (histogram_value value, unsigned tag, unsigned base)
{
/* FIXME implement this. */
#ifdef ENABLE_CHECKING
internal_error ("unimplemented functionality");
#endif
gcc_unreachable ();
tree op, op1, op2, op1copy, op2copy;
tree tmp1, tmp2, tmp3, val, index;
tree label_decl2, label_decl3, label_decl4, label_decl5, label_decl6;
edge e12, e23, e34, e45, e56;
tree label2, label3, label4, label5, label6;
tree stmt1, stmt2, stmt3, stmt4;
/* Initializations are to prevent bogus uninitialized warnings. */
tree bb1end = NULL_TREE, bb2end = NULL_TREE, bb3end = NULL_TREE;
tree bb4end = NULL_TREE, bb5end = NULL_TREE;
tree ref = tree_coverage_counter_ref (tag, base), ref2;
basic_block bb2, bb3, bb4, bb5, bb6;
tree stmt = value->hvalue.tree.stmt;
block_stmt_iterator bsi = bsi_for_stmt (stmt);
basic_block bb = bb_for_stmt (stmt);
tree optype;
op = stmt;
if (TREE_CODE (stmt) == RETURN_EXPR
&& TREE_OPERAND (stmt, 0)
&& TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
op = TREE_OPERAND (stmt, 0);
/* op == MODIFY_EXPR */
op = TREE_OPERAND (op, 1);
/* op == TRUNC_DIV or TRUNC_MOD */
op1 = TREE_OPERAND (op, 0);
op2 = TREE_OPERAND (op, 1);
optype = TREE_TYPE (op);
/* Blocks:
Original = 1
For 2nd compare = 2
Normal case, neither more nor less = 3
More = 4
Less = 5
End = 6 */
label_decl2 = create_artificial_label ();
label_decl3 = create_artificial_label ();
label_decl4 = create_artificial_label ();
label_decl5 = create_artificial_label ();
label_decl6 = create_artificial_label ();
/* Do not evaluate op1 or op2 more than once. Probably
volatile loads are the only things that could cause
a problem, but this is harmless in any case. */
op1copy = create_tmp_var (optype, "PROF");
op2copy = create_tmp_var (optype, "PROF");
stmt1 = build2 (MODIFY_EXPR, optype, op1copy, op1);
stmt2 = build2 (MODIFY_EXPR, optype, op2copy, op2);
TREE_OPERAND (op, 0) = op1copy;
TREE_OPERAND (op, 1) = op2copy;
val = create_tmp_var (optype, "PROF");
stmt3 = build2 (MODIFY_EXPR, optype, val,
build2 (TRUNC_DIV_EXPR, optype, op1copy, op2copy));
stmt4 = build2 (MODIFY_EXPR, optype, val,
build2 (MINUS_EXPR, optype, val,
build_int_cst (optype, value->hdata.intvl.int_start)));
bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
index = create_tmp_var (GCOV_TYPE_NODE, "PROF");
/* Check for too big. */
stmt1 = build3 (COND_EXPR, void_type_node,
build2 (GE_EXPR, boolean_type_node, val,
build_int_cst (optype, value->hdata.intvl.steps)),
build1 (GOTO_EXPR, void_type_node, label_decl4),
build1 (GOTO_EXPR, void_type_node, label_decl2));
bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
bb1end = stmt1;
/* Check for too small. */
label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
stmt1 = build3 (COND_EXPR, void_type_node,
build2 (LT_EXPR, boolean_type_node, val, integer_zero_node),
build1 (GOTO_EXPR, void_type_node, label_decl5),
build1 (GOTO_EXPR, void_type_node, label_decl3));
bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
bb2end = stmt1;
/* Normal case, within range. */
label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, index,
build1 (NOP_EXPR, GCOV_TYPE_NODE, val));
bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
bb3end = stmt1;
/* Too big */
label4 = build1 (LABEL_EXPR, void_type_node, label_decl4);
stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, index,
build_int_cst (GCOV_TYPE_NODE, value->hdata.intvl.steps));
bsi_insert_before (&bsi, label4, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
bb4end = stmt1;
/* Too small */
label5 = build1 (LABEL_EXPR, void_type_node, label_decl5);
stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, index,
build_int_cst (GCOV_TYPE_NODE, value->hdata.intvl.steps + 1));
bsi_insert_before (&bsi, label5, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
bb5end = stmt1;
/* Increment appropriate counter. */
label6 = build1 (LABEL_EXPR, void_type_node, label_decl6);
bsi_insert_before (&bsi, label6, BSI_SAME_STMT);
tmp1 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
tmp2 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
tmp3 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp1,
build2 (PLUS_EXPR, GCOV_TYPE_NODE, index,
TREE_OPERAND (ref, 1)));
TREE_OPERAND (ref, 1) = tmp1;
/* Make a copy to avoid sharing complaints. */
ref2 = build4 (ARRAY_REF, TREE_TYPE (ref), TREE_OPERAND (ref, 0),
TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2),
TREE_OPERAND (ref, 3));
stmt2 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp2, ref);
stmt3 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp3,
build2 (PLUS_EXPR, GCOV_TYPE_NODE, tmp2, integer_one_node));
stmt4 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, ref2, tmp3);
bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
/* Now fix up the CFG. */
/* 1->2,4; 2->3,5; 3->6; 4->6; 5->6 */
e12 = split_block (bb, bb1end);
bb2 = e12->dest;
e23 = split_block (bb2, bb2end);
bb3 = e23->dest;
e34 = split_block (bb3, bb3end);
bb4 = e34->dest;
e45 = split_block (bb4, bb4end);
bb5 = e45->dest;
e56 = split_block (bb5, bb5end);
bb6 = e56->dest;
e12->flags &= ~EDGE_FALLTHRU;
e12->flags |= EDGE_FALSE_VALUE;
make_edge (bb, bb4, EDGE_TRUE_VALUE);
e23->flags &= ~EDGE_FALLTHRU;
e23->flags |= EDGE_FALSE_VALUE;
make_edge (bb2, bb5, EDGE_TRUE_VALUE);
remove_edge (e34);
make_edge (bb3, bb6, EDGE_FALLTHRU);
remove_edge (e45);
make_edge (bb4, bb6, EDGE_FALLTHRU);
}
/* Output instructions as GIMPLE trees to increment the power of two histogram
@ -118,15 +243,162 @@ tree_gen_interval_profiler (histogram_value value ATTRIBUTE_UNUSED,
of the section for counters, BASE is offset of the counter position. */
static void
tree_gen_pow2_profiler (histogram_value value ATTRIBUTE_UNUSED,
unsigned tag ATTRIBUTE_UNUSED,
unsigned base ATTRIBUTE_UNUSED)
tree_gen_pow2_profiler (histogram_value value, unsigned tag, unsigned base)
{
/* FIXME implement this. */
#ifdef ENABLE_CHECKING
internal_error ("unimplemented functionality");
#endif
gcc_unreachable ();
tree op;
tree tmp1, tmp2, tmp3;
tree index, denom;
tree label_decl1 = create_artificial_label ();
tree label_decl2 = create_artificial_label ();
tree label_decl3 = create_artificial_label ();
tree label1, label2, label3;
tree stmt1, stmt2, stmt3, stmt4;
tree bb1end, bb2end, bb3end;
tree ref = tree_coverage_counter_ref (tag, base), ref2;
basic_block bb2, bb3, bb4;
tree stmt = value->hvalue.tree.stmt;
block_stmt_iterator bsi = bsi_for_stmt (stmt);
basic_block bb = bb_for_stmt (stmt);
tree optype, optypesigned, optypeunsigned;
op = stmt;
if (TREE_CODE (stmt) == RETURN_EXPR
&& TREE_OPERAND (stmt, 0)
&& TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
op = TREE_OPERAND (stmt, 0);
/* op == MODIFY_EXPR */
op = TREE_OPERAND (op, 1);
/* op == TRUNC_DIV or TRUNC_MOD */
op = TREE_OPERAND (op, 1);
/* op == denominator */
optype = TREE_TYPE (op);
if (TYPE_UNSIGNED (optype))
{
/* Right shift must be unsigned. */
optypeunsigned = optype;
optypesigned = build_distinct_type_copy (optype);
TYPE_UNSIGNED (optypesigned) = false;
}
else
{
/* Compare to zero must be signed. */
optypesigned = optype;
optypeunsigned = build_distinct_type_copy (optype);
TYPE_UNSIGNED (optypeunsigned) = true;
}
/* Set up variables and check if denominator is negative when considered
as signed. */
index = create_tmp_var (GCOV_TYPE_NODE, "PROF");
denom = create_tmp_var (optype, "PROF");
stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, index, integer_zero_node);
stmt2 = build2 (MODIFY_EXPR, optype, denom, op);
if (optypesigned == optype)
{
tmp1 = denom;
stmt3 = NULL_TREE;
}
else
{
tmp1 = create_tmp_var (optypesigned, "PROF");
stmt3 = build2 (MODIFY_EXPR, optypesigned, tmp1,
build1 (NOP_EXPR, optypesigned, denom));
}
stmt4 = build3 (COND_EXPR, void_type_node,
build2 (LE_EXPR, boolean_type_node, tmp1, integer_zero_node),
build1 (GOTO_EXPR, void_type_node, label_decl3),
build1 (GOTO_EXPR, void_type_node, label_decl1));
bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
if (stmt3)
bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
bb1end = stmt4;
/* Nonnegative. Check if denominator is power of 2. */
label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
tmp1 = create_tmp_var (optype, "PROF");
tmp2 = create_tmp_var (optype, "PROF");
stmt1 = build2 (MODIFY_EXPR, optype, tmp1,
build2 (PLUS_EXPR, optype, denom, integer_minus_one_node));
stmt2 = build2 (MODIFY_EXPR, optype, tmp2,
build2 (BIT_AND_EXPR, optype, tmp1, denom));
stmt3 = build3 (COND_EXPR, void_type_node,
build2 (NE_EXPR, boolean_type_node, tmp2, integer_zero_node),
build1 (GOTO_EXPR, void_type_node, label_decl3),
build1 (GOTO_EXPR, void_type_node, label_decl2));
bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
bb2end = stmt3;
/* Loop. Increment index, shift denominator, repeat if denominator nonzero. */
label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, index,
build2 (PLUS_EXPR, GCOV_TYPE_NODE, index, integer_one_node));
if (optypeunsigned == optype)
{
tmp1 = denom;
stmt2 = NULL_TREE;
}
else
{
tmp1 = create_tmp_var (optypeunsigned, "PROF");
stmt2 = build2 (MODIFY_EXPR, optypeunsigned, tmp1,
build1 (NOP_EXPR, optypeunsigned, denom));
}
stmt3 = build2 (MODIFY_EXPR, optype, denom,
build2 (RSHIFT_EXPR, optype, tmp1, integer_one_node));
stmt4 = build3 (COND_EXPR, void_type_node,
build2 (NE_EXPR, boolean_type_node, denom, integer_zero_node),
build1 (GOTO_EXPR, void_type_node, label_decl2),
build1 (GOTO_EXPR, void_type_node, label_decl3));
bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
if (stmt2)
bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
bb3end = stmt4;
/* Increment the appropriate counter. */
label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
tmp1 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
tmp2 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
tmp3 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp1,
build2 (PLUS_EXPR, GCOV_TYPE_NODE, index, TREE_OPERAND (ref, 1)));
TREE_OPERAND (ref, 1) = tmp1;
/* Make a copy to avoid sharing complaints. */
ref2 = build4 (ARRAY_REF, TREE_TYPE (ref), TREE_OPERAND (ref, 0),
TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
stmt2 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp2, ref);
stmt3 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp3,
build2 (PLUS_EXPR, GCOV_TYPE_NODE, tmp2, integer_one_node));
stmt4 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, ref2, tmp3);
bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
/* Now fix up the CFG. */
bb2 = (split_block (bb, bb1end))->dest;
bb3 = (split_block (bb2, bb2end))->dest;
bb4 = (split_block (bb3, bb3end))->dest;
EDGE_SUCC (bb, 0)->flags &= ~EDGE_FALLTHRU;
EDGE_SUCC (bb, 0)->flags |= EDGE_FALSE_VALUE;
make_edge (bb, bb4, EDGE_TRUE_VALUE);
EDGE_SUCC (bb2, 0)->flags &= ~EDGE_FALLTHRU;
EDGE_SUCC (bb2, 0)->flags |= EDGE_FALSE_VALUE;
make_edge (bb2, bb4, EDGE_TRUE_VALUE);
EDGE_SUCC (bb3, 0)->flags &= ~EDGE_FALLTHRU;
EDGE_SUCC (bb3, 0)->flags |= EDGE_FALSE_VALUE;
make_edge (bb3, bb3, EDGE_TRUE_VALUE);
}
/* Output instructions as GIMPLE trees for code to find the most common value.
@ -134,15 +406,150 @@ tree_gen_pow2_profiler (histogram_value value ATTRIBUTE_UNUSED,
section for counters, BASE is offset of the counter position. */
static void
tree_gen_one_value_profiler (histogram_value value, unsigned tag, unsigned base)
{
  tree op;
  tree tmp1, tmp2, tmp3;
  tree label_decl1 = create_artificial_label ();
  tree label_decl2 = create_artificial_label ();
  tree label_decl3 = create_artificial_label ();
  tree label_decl4 = create_artificial_label ();
  tree label_decl5 = create_artificial_label ();
  tree label1, label2, label3, label4, label5;
  tree stmt1, stmt2, stmt3, stmt4;
  tree bb1end, bb2end, bb3end, bb4end, bb5end;
  /* Counter layout for a one-value histogram: [base] the candidate
     value, [base + 1] its countdown counter, [base + 2] the total
     number of executions.  */
  tree ref1 = tree_coverage_counter_ref (tag, base);
  tree ref2 = tree_coverage_counter_ref (tag, base + 1);
  tree ref3 = tree_coverage_counter_ref (tag, base + 2);
  basic_block bb2, bb3, bb4, bb5, bb6;
  tree stmt = value->hvalue.tree.stmt;
  block_stmt_iterator bsi = bsi_for_stmt (stmt);
  basic_block bb = bb_for_stmt (stmt);
  tree optype;

  /* Dig the profiled operand out of STMT: it is either a MODIFY_EXPR
     or a RETURN_EXPR wrapping one; the value of interest is the second
     operand (the divisor) of the division/modulus on the RHS.  */
  op = stmt;
  if (TREE_CODE (stmt) == RETURN_EXPR
      && TREE_OPERAND (stmt, 0)
      && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
    op = TREE_OPERAND (stmt, 0);

  /* op == MODIFY_EXPR */
  op = TREE_OPERAND (op, 1);
  /* op == TRUNC_DIV or TRUNC_MOD */
  op = TREE_OPERAND (op, 1);
  /* op == denominator */

  optype = TREE_TYPE (op);

  /* Check if the stored value matches.  */
  tmp1 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
  tmp2 = create_tmp_var (optype, "PROF");
  tmp3 = create_tmp_var (optype, "PROF");
  stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp1, ref1);
  stmt2 = build2 (MODIFY_EXPR, optype, tmp2,
		  build1 (NOP_EXPR, optype, tmp1));
  stmt3 = build2 (MODIFY_EXPR, optype, tmp3, op);
  stmt4 = build3 (COND_EXPR, void_type_node,
		  build2 (EQ_EXPR, boolean_type_node, tmp2, tmp3),
		  build1 (GOTO_EXPR, void_type_node, label_decl4),
		  build1 (GOTO_EXPR, void_type_node, label_decl1));
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
  bb1end = stmt4;

  /* Does not match; check whether the counter is zero.  */
  label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
  tmp1 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
  stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp1, ref2);
  stmt2 = build3 (COND_EXPR, void_type_node,
		  build2 (EQ_EXPR, boolean_type_node, tmp1, integer_zero_node),
		  build1 (GOTO_EXPR, void_type_node, label_decl3),
		  build1 (GOTO_EXPR, void_type_node, label_decl2));
  bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bb2end = stmt2;

  /* Counter is not zero yet, decrement.  */
  label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
  tmp1 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
  tmp2 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
  stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp1, ref2);
  stmt2 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp2,
		  build2 (MINUS_EXPR, GCOV_TYPE_NODE,
			  tmp1, integer_one_node));
  stmt3 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, ref2, tmp2);
  bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bb3end = stmt3;

  /* Counter was zero, store new value.  */
  label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
  tmp1 = create_tmp_var (optype, "PROF");
  tmp2 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
  stmt1 = build2 (MODIFY_EXPR, optype, tmp1, op);
  stmt2 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp2,
		  build1 (NOP_EXPR, GCOV_TYPE_NODE, tmp1));
  stmt3 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, ref1, tmp2);
  bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bb4end = stmt3;
  /* (fall through) */

  /* Increment counter.  */
  label4 = build1 (LABEL_EXPR, void_type_node, label_decl4);
  tmp1 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
  tmp2 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
  stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp1, ref2);
  stmt2 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp2,
		  build2 (PLUS_EXPR, GCOV_TYPE_NODE,
			  tmp1, integer_one_node));
  stmt3 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, ref2, tmp2);
  bsi_insert_before (&bsi, label4, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
  bb5end = stmt3;

  /* Increment the counter of all executions; this seems redundant given
     that we have counts for edges in cfg, but it may happen that some
     optimization will change the counts for the block (either because
     it is unable to update them correctly, or because it will duplicate
     the block or its part).  */
  label5 = build1 (LABEL_EXPR, void_type_node, label_decl5);
  tmp1 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
  tmp2 = create_tmp_var (GCOV_TYPE_NODE, "PROF");
  stmt1 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp1, ref3);
  stmt2 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, tmp2,
		  build2 (PLUS_EXPR, GCOV_TYPE_NODE,
			  tmp1, integer_one_node));
  stmt3 = build2 (MODIFY_EXPR, GCOV_TYPE_NODE, ref3, tmp2);
  bsi_insert_before (&bsi, label5, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);

  /* Now fix up the CFG.  The inserted code is split into five new
     blocks: bb2 = zero check, bb3 = decrement, bb4 = store new value,
     bb5 = increment counter, bb6 = total-execution counter.  */
  bb2 = (split_block (bb, bb1end))->dest;
  bb3 = (split_block (bb2, bb2end))->dest;
  bb4 = (split_block (bb3, bb3end))->dest;
  bb5 = (split_block (bb4, bb4end))->dest;
  bb6 = (split_block (bb5, bb5end))->dest;

  /* Match: jump straight to the increment block.  */
  EDGE_SUCC (bb, 0)->flags &= ~EDGE_FALLTHRU;
  EDGE_SUCC (bb, 0)->flags |= EDGE_FALSE_VALUE;
  make_edge (bb, bb5, EDGE_TRUE_VALUE);
  /* Counter zero: skip the decrement, store the new value instead.  */
  EDGE_SUCC (bb2, 0)->flags &= ~EDGE_FALLTHRU;
  EDGE_SUCC (bb2, 0)->flags |= EDGE_FALSE_VALUE;
  make_edge (bb2, bb4, EDGE_TRUE_VALUE);
  /* After decrementing, go directly to the total counter.  */
  remove_edge (EDGE_SUCC (bb3, 0));
  make_edge (bb3, bb6, EDGE_FALLTHRU);
}
/* Output instructions as GIMPLE trees for code to find the most common value
@@ -166,7 +573,8 @@ tree_gen_const_delta_profiler (histogram_value value ATTRIBUTE_UNUSED,
If it is, set up hooks for tree-based profiling.
Gate for pass_tree_profile. */
static bool do_tree_profiling (void)
static bool
do_tree_profiling (void)
{
if (flag_tree_based_profiling
&& (profile_arc_flag || flag_test_coverage || flag_branch_probabilities))
@@ -184,11 +592,26 @@ static FILE *tree_profile_dump_file (void) {
return dump_file;
}
static void
tree_profiling (void)
{
  /* Instrument (or annotate from feedback) the edge counts first.  */
  branch_prob ();

  /* When reading profile feedback back in, apply the value-profile
     driven transformations, provided value profiling and its
     transformations are both enabled.  */
  if (flag_value_profile_transformations
      && flag_profile_values
      && flag_branch_probabilities)
    value_profile_transformations ();

  /* The above could hose dominator info.  Currently there is
     none coming in, this is a safety valve.  It should be
     easy to adjust it, if and when there is some.  */
  free_dominance_info (CDI_POST_DOMINATORS);
  free_dominance_info (CDI_DOMINATORS);
}
struct tree_opt_pass pass_tree_profile =
{
"tree_profile", /* name */
do_tree_profiling, /* gate */
branch_prob, /* execute */
tree_profiling, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */

File diff suppressed because it is too large Load Diff

View File

@@ -38,14 +38,25 @@ enum hist_type
((enum hist_type) ((COUNTER) - GCOV_FIRST_VALUE_COUNTER))
/* The value to measure. */
/* The void *'s are either rtx or tree, depending on which IR is in use. */
struct histogram_value_t GTY(())
struct histogram_value_t
{
PTR GTY ((skip (""))) value; /* The value to profile. */
enum machine_mode mode; /* And its mode. */
PTR GTY ((skip (""))) seq; /* Insns required to count the
profiled value. */
PTR GTY ((skip (""))) insn; /* Insn before that to measure. */
union
{
struct
{
rtx value; /* The value to profile. */
rtx seq; /* Insns required to count the profiled value. */
rtx insn; /* Insn before that to measure. */
enum machine_mode mode; /* Mode of value to profile. */
} rtl;
struct
{
tree value; /* The value to profile. */
tree stmt; /* Insn containing the value. */
gcov_type *counters; /* Pointer to first counter. */
struct histogram_value_t *next; /* Linked list pointer. */
} tree;
} hvalue;
enum hist_type type; /* Type of information to measure. */
unsigned n_counters; /* Number of required counters. */
union
@@ -53,9 +64,7 @@ struct histogram_value_t GTY(())
struct
{
int int_start; /* First value in interval. */
int steps; /* Number of values in it. */
int may_be_less; /* May the value be below? */
int may_be_more; /* Or above. */
unsigned int steps; /* Number of values in it. */
} intvl; /* Interval histogram data. */
struct
{
@@ -66,7 +75,7 @@ struct histogram_value_t GTY(())
typedef struct histogram_value_t *histogram_value;
DEF_VEC_GC_P(histogram_value);
DEF_VEC_MALLOC_P(histogram_value);
typedef VEC(histogram_value) *histogram_values;