jump.c: Add prototype for mark_modified_reg.

* jump.c: Add prototype for mark_modified_reg.

        * cse.c (set_live_p): Add unused attribute.

        * gcov.c (calculate_branch_probs): Use gcov_type to avoid
        overflow.
        (scan_for_source_files): Use long for count to avoid overflow.
        (output_data): Likewise.
        (output_data): Don't use string concatenation to silence gcc
        -traditional.

        * predict.c: Fix typos and grammar.

        * gcse.c (insert_insn_end_bb): Remove unused variables.

For cp:
        * decl2.c: Remove unused var global_temp_name_counter.

From-SVN: r44479
This commit is contained in:
Andreas Jaeger 2001-07-30 20:04:33 +02:00 committed by Andreas Jaeger
parent d76cbbc844
commit 57cb6d521a
7 changed files with 75 additions and 62 deletions

View File

@ -1,3 +1,7 @@
2001-07-30 Andreas Jaeger <aj@suse.de>
* decl2.c: Remove unused var global_temp_name_counter.
2001-07-28 Richard Henderson <rth@redhat.com>
* method.c (pending_inlines): Remove.

View File

@ -104,10 +104,6 @@ static varray_type deferred_fns;
#define deferred_fns_used \
(deferred_fns ? deferred_fns->elements_used : 0)
/* Same, but not reset. Local temp variables and global temp variables
can have the same name. */
static int global_temp_name_counter;
/* Flag used when debugging spew.c */
extern int spew_debug;

View File

@ -7489,7 +7489,7 @@ count_reg_usage (x, counts, dest, incr)
static bool
set_live_p (set, insn, counts)
rtx set;
rtx insn;
rtx insn ATTRIBUTE_UNUSED; /* Only used with HAVE_cc0. */
int *counts;
{
#ifdef HAVE_cc0

View File

@ -788,7 +788,7 @@ scan_for_source_files ()
{
struct sourcefile *s_ptr = NULL;
char *ptr;
int count;
long count;
long line_num;
/* Search the bb_data to find:
@ -878,7 +878,7 @@ calculate_branch_probs (current_graph, block_num, branch_probs, last_line_num)
struct arcdata **branch_probs;
int last_line_num;
{
int total;
gcov_type total;
struct adj_list *arcptr;
struct arcdata *end_ptr, *a_ptr;
@ -990,7 +990,7 @@ output_data ()
char *source_file_name;
FILE *source_file;
struct bb_info_list *current_graph;
int count;
long count;
char *cptr;
long block_num;
long line_num;
@ -1360,16 +1360,23 @@ output_data ()
else
{
if (output_branch_counts)
fnotice (gcov_file,
"call %d returns = "
HOST_WIDEST_INT_PRINT_DEC "\n",
i, a_ptr->total - a_ptr->hits);
{
char c[20];
sprintf (c, HOST_WIDEST_INT_PRINT_DEC,
a_ptr->total - a_ptr->hits);
fnotice (gcov_file,
"call %d returns = %s\n", i, c);
}
else
fnotice (gcov_file,
"call %d returns = "
HOST_WIDEST_INT_PRINT_DEC "%%\n",
i, 100 - ((a_ptr->hits * 100) +
(a_ptr->total >> 1))/a_ptr->total);
{
char c[20];
sprintf (c, HOST_WIDEST_INT_PRINT_DEC,
100 - ((a_ptr->hits * 100)
+ (a_ptr->total >> 1))
/ a_ptr->total);
fnotice (gcov_file,
"call %d returns = %s%%\n", i, c);
}
}
}
else
@ -1380,18 +1387,23 @@ output_data ()
else
{
if (output_branch_counts)
fnotice (gcov_file,
"branch %d taken = "
HOST_WIDEST_INT_PRINT_DEC "\n",
i, a_ptr->hits);
{
char c[20];
sprintf (c, HOST_WIDEST_INT_PRINT_DEC,
a_ptr->hits);
fnotice (gcov_file,
"branch %d taken = %s\n", i, c);
}
else
{
char c[20];
sprintf (c, HOST_WIDEST_INT_PRINT_DEC,
((a_ptr->hits * 100)
+ (a_ptr->total >> 1))
/ a_ptr->total);
fnotice (gcov_file,
"branch %d taken = "
HOST_WIDEST_INT_PRINT_DEC "%%\n", i,
((a_ptr->hits * 100) +
(a_ptr->total >> 1))/
a_ptr->total);
"branch %d taken = %s%%\n", i, c);
}
}
}
}

View File

@ -4628,10 +4628,6 @@ insert_insn_end_bb (expr, bb, pre)
of exception handling. */
else if (GET_CODE (insn) == CALL_INSN)
{
HARD_REG_SET parm_regs;
int nparm_regs;
rtx p;
/* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
we search backward and place the instructions before the first
parameter is loaded. Do this for everyone for consistency and a

View File

@ -69,6 +69,7 @@ static void invert_exp_1 PARAMS ((rtx));
static int invert_exp PARAMS ((rtx));
static int returnjump_p_1 PARAMS ((rtx *, void *));
static void delete_prior_computation PARAMS ((rtx, rtx));
static void mark_modified_reg PARAMS ((rtx, rtx, void *));
/* Alternate entry into the jump optimizer. This entry point only rebuilds
the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping

View File

@ -228,7 +228,7 @@ combine_predictions_for_insn (insn, bb)
bb->index);
/* We implement "first match" heuristics and use probability guessed
by predictor with smallest index. In future we will use better
by predictor with smallest index. In the future we will use better
probability combination techniques. */
while (*pnote)
{
@ -305,7 +305,7 @@ estimate_probability (loops_info)
int header_found = 0;
edge e;
/* Loop branch heruistics - predict as taken an edge back to
/* Loop branch heuristics - predict as taken an edge back to
a loop's head. */
for (e = BASIC_BLOCK(j)->succ; e; e = e->succ_next)
if (e->dest == loops_info->array[i].header
@ -314,8 +314,9 @@ estimate_probability (loops_info)
header_found = 1;
predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
}
/* Loop exit heruistics - predict as not taken an edge exiting
the loop if the conditinal has no loop header successors */
/* Loop exit heuristics - predict as not taken an edge
exiting the loop if the conditional has no loop header
successors. */
if (!header_found)
for (e = BASIC_BLOCK(j)->succ; e; e = e->succ_next)
if (e->dest->index <= 0
@ -620,19 +621,22 @@ propagate_freq (head)
if (!BLOCK_INFO (e->src)->visited && !EDGE_INFO (e)->back_edge)
break;
/* We didn't proceeded all predecesors of edge e yet. These may
be waiting in the queue or we may hit irreducible region.
/* We haven't proceeded all predecessors of edge e yet.
These may be waiting in the queue or we may hit an
irreducible region.
To avoid infinite looping on irrecudible regions, count number
of block proceeded at the time basic block has been queued. In the
case number didn't changed, we've hit irreducible region and we
forget the backward edge. This can increase time complexity
by the number of irreducible blocks, but in same way standard the
loop does, so it should not result in noticeable slowodwn.
To avoid infinite looping on irreducible regions, count
the number of blocks proceeded at the time the basic
block has been queued. In the case the number doesn't
change, we've hit an irreducible region and we can forget
the backward edge. This can increase the time complexity
by the number of irreducible blocks, but in the same way
the standard loop does, so it should not result in a
noticeable slowdown.
Alternativly we may distinquish backward and cross edges in the
DFS tree by preprocesing pass and ignore existence of non-loop
backward edges. */
Alternatively we may distinguish backward and cross edges
in the DFS tree by the preprocessing pass and ignore the
existence of non-loop backward edges. */
if (e && BLOCK_INFO (bb)->nvisited != nvisited)
{
if (!nextbb)
@ -670,7 +674,7 @@ propagate_freq (head)
* BLOCK_INFO (bb)->frequency
/ REG_BR_PROB_BASE);
/* Propagate to succesor blocks. */
/* Propagate to successor blocks. */
for (e = bb->succ; e; e = e->succ_next)
if (!EDGE_INFO (e)->back_edge
&& !BLOCK_INFO (e->dest)->visited
@ -687,7 +691,7 @@ propagate_freq (head)
}
}
/* Estimate probabilities of the loopback edges in loops at same nest level. */
/* Estimate probabilities of loopback edges in loops at same nest level. */
static void
estimate_loops_at_level (first_loop)
struct loop *first_loop;
@ -701,13 +705,13 @@ estimate_loops_at_level (first_loop)
estimate_loops_at_level (loop->inner);
/* find current loop back edge and mark it. */
/* Find current loop back edge and mark it. */
for (e = loop->latch->succ; e->dest != loop->header; e = e->succ_next);
EDGE_INFO (e)->back_edge = 1;
/* In case loop header is shared, ensure that it is the last one sharing
same header, so we avoid redundant work. */
/* In case the loop header is shared, ensure that it is the last
one sharing the same header, so we avoid redundant work. */
if (loop->shared)
{
for (l = loop->next; l; l = l->next)
@ -778,12 +782,12 @@ estimate_bb_frequencies (loops)
edge fallthru, branch;
if (GET_CODE (last_insn) != JUMP_INSN || !any_condjump_p (last_insn)
/* Avoid handling of conditionals jump jumping to fallthru edge. */
/* Avoid handling of conditional jumps jumping to fallthru edge. */
|| BASIC_BLOCK (i)->succ->succ_next == NULL)
{
/* We can predict only conditional jumps at the moment.
Expect each edge to be equall probable.
?? In future we want to make abnormal edges improbable. */
Expect each edge to be equally probable.
?? In the future we want to make abnormal edges improbable. */
int nedges = 0;
edge e;