cfgcleanup.c (try_simplify_condjump): Don't remove line notes to avoid unreachable code warnings.

* cfgcleanup.c (try_simplify_condjump): Don't remove line
	notes to avoid unreachable code warnings.
	* toplev.c (backend_init): Don't emit line notes for
	unreachable code warnings.

	* combine.c (distribute_notes): Don't distribute a
	REG_VTABLE_REF note.
	* final.c (final_scan_insn): Don't handle it.
	* rtl.c (reg_note_name): Remove it.
	* rtl.h (enum reg_note): Ditto.

	* emit-rtl.c (force_line_numbers, restore_line_number_status):
	Remove.
	* rtl.h (force_line_numbers, restore_line_number_status):
	Remove prototypes.

	* stmt.c (using_eh_for_cleanups_p, using_eh_for_cleanups): Move...
	* tree-eh.c (using_eh_for_cleanups_p): ...here.  Make static.
	(using_eh_for_cleanups): Also moved here.

	* expr.c (expand_expr_real_1) <CASE_LABEL_EXPR>: Die if we see one.
	<SWITCH_EXPR>: Die if we have a non-NULL SWITCH_BODY.  Update calls
	to expand_start_case and add_case_node.
	* stmt.c (struct nesting): Remove the unused fields conditional_code,
	last_unconditional_cleanup, nominal_type, printname, and
	line_number_status.
	(struct fixup_goto): Remove.
	(struct stmt_status): Remove x_goto_fixup_chain field.
	(goto_fixup_chain): Remove.
	(strip_default_case_nodes, group_case_nodes, emit_jump_if_reachable,
	pushcase, pushcase_range): Remove.
	(expand_start_bindings_and_block): Don't set unused fields in
	the nesting stack.
	(expand_start_case, add_case_node): Remove unused formal arguments.
	(expand_end_case_type): Don't simplify the case-list.  Use emit_jump
	instead of emit_jump_if_reachable.
	(emit_case_nodes): Likewise.
	* tree-cfg.c (group_case_labels, cleanup_dead_labels): No longer
	static.
	(update_eh_label): Work around left-over exception handling regions.
	* tree-flow.h (group_case_labels, cleanup_dead_labels): Add prototypes.
	* tree-optimize.c (execute_cleanup_cfg_post_optimizing): New function.
	(pass_cleanup_cfg_post_optimizing): New pass.
	(init_tree_optimization_passes): Run the new pass after all
	optimizations.
	* tree.h (pushcase, pushcase_range): Remove prototypes.
	(expand_start_case, add_case_node): Update prototypes.

cp/
	* cp-tree.h (struct lang_type): Don't have three GTY options on a
	single-bit GTY desc.

java/
	* parse.y (java_complete_expand_methods, java_expand_classes): Don't
	abuse restore_line_number_status.

From-SVN: r84849
Steven Bosscher, 2004-07-17 00:31:15 +00:00 (committed by Steven Bosscher)
parent cd0408d8ff
commit 165b54c3f3
20 changed files with 142 additions and 411 deletions



@ -122,8 +122,6 @@ try_simplify_condjump (basic_block cbranch_block)
basic_block jump_block, jump_dest_block, cbranch_dest_block;
edge cbranch_jump_edge, cbranch_fallthru_edge;
rtx cbranch_insn;
rtx insn, next;
rtx end;
/* Verify that there are exactly two successors. */
if (!cbranch_block->succ
@ -186,26 +184,6 @@ try_simplify_condjump (basic_block cbranch_block)
cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
update_br_prob_note (cbranch_block);
end = BB_END (jump_block);
/* Deleting a block may produce unreachable code warning even when we are
not deleting anything live. Suppress it by moving all the line number
notes out of the block. */
for (insn = BB_HEAD (jump_block); insn != NEXT_INSN (BB_END (jump_block));
insn = next)
{
next = NEXT_INSN (insn);
if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
if (insn == BB_END (jump_block))
{
BB_END (jump_block) = PREV_INSN (insn);
if (insn == end)
break;
}
reorder_insns_nobb (insn, insn, end);
end = insn;
}
}
/* Delete the block with the unconditional jump, and clean up the mess. */
delete_basic_block (jump_block);
tidy_fallthru_edge (cbranch_jump_edge);
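
An editorial aside, not part of the commit: the hunk above interleaves the deleted note-moving loop with the lines that survive, so the tail of try_simplify_condjump after the patch reads roughly as follows (reconstructed purely from the hunk's own context lines):

  cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  update_br_prob_note (cbranch_block);

  /* Delete the block with the unconditional jump, and clean up the mess.  */
  delete_basic_block (jump_block);
  tidy_fallthru_edge (cbranch_jump_edge);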


@ -11698,12 +11698,6 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
/* Just get rid of this note, as it is unused later anyway. */
break;
case REG_VTABLE_REF:
/* ??? Should remain with *a particular* memory load. Given the
nature of vtable data, the last insn seems relatively safe. */
place = i3;
break;
case REG_NON_LOCAL_GOTO:
if (JUMP_P (i3))
place = i3;



@ -1033,7 +1033,7 @@ struct lang_type GTY(())
{
union lang_type_u
{
struct lang_type_header GTY((tag ("2"))) h;
struct lang_type_header GTY((skip (""))) h;
struct lang_type_class GTY((tag ("1"))) c;
struct lang_type_ptrmem GTY((tag ("0"))) ptrmem;
} GTY((desc ("%h.h.is_lang_type_class"))) u;
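
Editorial annotation, not part of the commit: the GTY desc here, %h.h.is_lang_type_class, is a single bit, so it can only ever select two tags; the old tag "2" on the header member h could never match.  Annotated, the pattern after the change is:

  union lang_type_u
  {
    /* Never selected by the one-bit desc, so it is skipped rather than
       carrying a dead third tag.  */
    struct lang_type_header GTY((skip (""))) h;
    struct lang_type_class  GTY((tag ("1"))) c;       /* is_lang_type_class == 1 */
    struct lang_type_ptrmem GTY((tag ("0"))) ptrmem;  /* is_lang_type_class == 0 */
  } GTY((desc ("%h.h.is_lang_type_class"))) u;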


@ -5438,27 +5438,6 @@ init_emit_once (int line_numbers)
pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
}
/* Query and clear/ restore no_line_numbers. This is used by the
switch / case handling in stmt.c to give proper line numbers in
warnings about unreachable code. */
int
force_line_numbers (void)
{
int old = no_line_numbers;
no_line_numbers = 0;
if (old)
force_next_line_note ();
return old;
}
void
restore_line_number_status (int old_value)
{
no_line_numbers = old_value;
}
/* Produce exact duplicate of insn INSN after AFTER.
Care updating of libcall regions if present. */


@ -8541,6 +8541,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
case WITH_CLEANUP_EXPR:
case CLEANUP_POINT_EXPR:
case TARGET_EXPR:
case CASE_LABEL_EXPR:
case VA_ARG_EXPR:
/* Lowered by gimplify.c. */
abort ();
@ -8561,10 +8562,11 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
abort ();
case SWITCH_EXPR:
expand_start_case (0, SWITCH_COND (exp), integer_type_node,
"switch");
expand_start_case (SWITCH_COND (exp));
/* The switch body is lowered in gimplify.c, we should never have
switches with a non-NULL SWITCH_BODY here. */
if (SWITCH_BODY (exp))
expand_expr_stmt (SWITCH_BODY (exp));
abort ();
if (SWITCH_LABELS (exp))
{
tree duplicate = 0;
@ -8623,7 +8625,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
}
}
add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate);
if (duplicate)
abort ();
}
@ -8635,16 +8637,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
expand_label (TREE_OPERAND (exp, 0));
return const0_rtx;
case CASE_LABEL_EXPR:
{
tree duplicate = 0;
add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
&duplicate, false);
if (duplicate)
abort ();
return const0_rtx;
}
case ASM_EXPR:
expand_asm_expr (exp);
return const0_rtx;
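
Because the expr.c hunks above interleave old and new lines, here is a hedged sketch of the post-patch shape of the SWITCH_EXPR case (the walk over SWITCH_LABELS that produces elt, case_low and case_high is elided, as it is in the hunks):

      case SWITCH_EXPR:
        expand_start_case (SWITCH_COND (exp));
        /* The switch body is lowered in gimplify.c, we should never have
           switches with a non-NULL SWITCH_BODY here.  */
        if (SWITCH_BODY (exp))
          abort ();
        if (SWITCH_LABELS (exp))
          {
            tree duplicate = 0;
            /* ... for each label ELT of SWITCH_LABELS (exp) ... */
            add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate);
            if (duplicate)
              abort ();
          }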


@ -1999,7 +1999,6 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
rtx body = PATTERN (insn);
int insn_code_number;
const char *template;
rtx note;
/* An INSN, JUMP_INSN or CALL_INSN.
First check for special kinds that recog doesn't recognize. */
@ -2549,18 +2548,6 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
dwarf2out_frame_debug (insn);
#endif
#if 0
/* It's not at all clear why we did this and doing so used to
interfere with tests that used REG_WAS_0 notes, which are
now gone, so let's try with this out. */
/* Mark this insn as having been output. */
INSN_DELETED_P (insn) = 1;
#endif
/* Emit information for vtable gc. */
note = find_reg_note (insn, REG_VTABLE_REF, NULL_RTX);
current_output_insn = debug_insn = 0;
}
}


@ -1066,7 +1066,7 @@ compare_case_labels (const void *p1, const void *p2)
return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
}
/* Sort the case labels in LABEL_VEC in ascending order. */
/* Sort the case labels in LABEL_VEC in place in ascending order. */
void
sort_case_labels (tree label_vec)



@ -7742,21 +7742,9 @@ java_complete_expand_methods (tree class_decl)
/* Now do the constructors */
for (decl = first_decl ; !java_error_count && decl; decl = TREE_CHAIN (decl))
{
int no_body;
if (!DECL_CONSTRUCTOR_P (decl))
continue;
no_body = !DECL_FUNCTION_BODY (decl);
/* Don't generate debug info on line zero when expanding a
generated constructor. */
if (no_body)
restore_line_number_status (1);
java_complete_expand_method (decl);
if (no_body)
restore_line_number_status (0);
}
/* First, do the ordinary methods. */
@ -9032,9 +9020,7 @@ java_expand_classes (void)
{
if (DECL_CONSTRUCTOR_P (d))
{
restore_line_number_status (1);
java_complete_expand_method (d);
restore_line_number_status (0);
break; /* There is only one constructor. */
}
}
@ -9056,11 +9042,7 @@ java_expand_classes (void)
for (d = TYPE_METHODS (current_class); d; d = TREE_CHAIN (d))
{
if (DECL_RESULT (d) == NULL_TREE)
{
restore_line_number_status (1);
java_complete_expand_method (d);
restore_line_number_status (0);
}
java_complete_expand_method (d);
}
}
}
@ -9089,9 +9071,7 @@ java_expand_classes (void)
if (DECL_RESULT (d) == NULL_TREE)
{
something_changed = 1;
restore_line_number_status (1);
java_complete_expand_method (d);
restore_line_number_status (0);
}
}
}


@ -136,8 +136,7 @@ const char * const reg_note_name[] =
"REG_VALUE_PROFILE", "REG_NOALIAS", "REG_SAVE_AREA", "REG_BR_PRED",
"REG_FRAME_RELATED_EXPR", "REG_EH_CONTEXT", "REG_EH_REGION",
"REG_SAVE_NOTE", "REG_MAYBE_DEAD", "REG_NORETURN",
"REG_NON_LOCAL_GOTO", "REG_CROSSING_JUMP", "REG_SETJMP", "REG_ALWAYS_RETURN",
"REG_VTABLE_REF"
"REG_NON_LOCAL_GOTO", "REG_CROSSING_JUMP", "REG_SETJMP", "REG_ALWAYS_RETURN"
};


@ -865,11 +865,7 @@ enum reg_note
REG_SETJMP,
/* Indicate calls that always returns. */
REG_ALWAYS_RETURN,
/* Indicate that the memory load references a vtable. The expression
is of the form (plus (symbol_ref vtable_sym) (const_int offset)). */
REG_VTABLE_REF
REG_ALWAYS_RETURN
};
/* The base value for branch probability notes. */
@ -2200,11 +2196,6 @@ extern void remove_insn (rtx);
extern void emit_insn_after_with_line_notes (rtx, rtx, rtx);
extern enum rtx_code classify_insn (rtx);
extern rtx emit (rtx);
/* Query and clear/ restore no_line_numbers. This is used by the
switch / case handling in stmt.c to give proper line numbers in
warnings about unreachable code. */
int force_line_numbers (void);
void restore_line_number_status (int old_value);
extern void renumber_insns (FILE *);
extern void remove_unnecessary_notes (void);
extern rtx delete_insn (rtx);


@ -174,16 +174,6 @@ struct nesting GTY(())
reverts to the saved target_temp_slot_level at the very
end of the block. */
int block_target_temp_slot_level;
/* True if we are currently emitting insns in an area of
output code that is controlled by a conditional
expression. This is used by the cleanup handling code to
generate conditional cleanup actions. */
int conditional_code;
/* A place to move the start of the exception region for any
of the conditional cleanups, must be at the end or after
the start of the last unconditional cleanup, and before any
conditional branch points. */
rtx last_unconditional_cleanup;
} GTY ((tag ("BLOCK_NESTING"))) block;
/* For switch (C) or case (Pascal) statements. */
struct nesting_case
@ -199,14 +189,6 @@ struct nesting GTY(())
tree default_label;
/* The expression to be dispatched on. */
tree index_expr;
/* Type that INDEX_EXPR should be converted to. */
tree nominal_type;
/* Name of this kind of statement, for warnings. */
const char *printname;
/* Used to save no_line_numbers till we see the first case label.
We set this to -1 when we see the first case label in this
case statement. */
int line_number_status;
} GTY ((tag ("CASE_NESTING"))) case_stmt;
} GTY ((desc ("%1.desc"))) data;
};
@ -234,32 +216,6 @@ do { struct nesting *target = STACK; \
nesting_stack = this->all; } \
while (this != target); } while (0)
/* In some cases it is impossible to generate code for a forward goto
until the label definition is seen. This happens when it may be necessary
for the goto to reset the stack pointer: we don't yet know how to do that.
So expand_goto puts an entry on this fixup list.
Each time a binding contour that resets the stack is exited,
we check each fixup.
If the target label has now been defined, we can insert the proper code. */
struct goto_fixup GTY(())
{
/* Points to following fixup. */
struct goto_fixup *next;
/* Points to the insn before the jump insn.
If more code must be inserted, it goes after this insn. */
rtx before_jump;
/* The LABEL_DECL that this jump is jumping to, or 0
for break, continue or return. */
tree target;
/* The BLOCK for the place where this goto was found. */
tree context;
/* The CODE_LABEL rtx that this is jumping to. */
rtx target_rtl;
/* Number of binding contours started in current function
before the label reference. */
int block_start_count;
};
struct stmt_status GTY(())
{
@ -287,8 +243,6 @@ struct stmt_status GTY(())
/* Location of last line-number note, whether we actually
emitted it or not. */
location_t x_emit_locus;
struct goto_fixup *x_goto_fixup_chain;
};
#define block_stack (cfun->stmt->x_block_stack)
@ -298,10 +252,6 @@ struct stmt_status GTY(())
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
/* Nonzero if we are using EH to handle cleanups. */
int using_eh_for_cleanups_p = 0;
static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
@ -316,25 +266,16 @@ static void expand_value_return (rtx);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);
void
using_eh_for_cleanups (void)
{
using_eh_for_cleanups_p = 1;
}
void
init_stmt_for_function (void)
{
@ -2207,8 +2148,6 @@ expand_start_bindings_and_block (int flags, tree block)
thisblock->depth = ++nesting_depth;
thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
thisblock->data.block.conditional_code = 0;
thisblock->data.block.last_unconditional_cleanup = note;
/* When we insert instructions after the last unconditional cleanup,
we don't adjust last_insn. That means that a later add_insn will
clobber the instructions we've just added. The easiest way to
@ -2760,8 +2699,7 @@ expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
but instead we take short cuts. */
void
expand_start_case (int exit_flag, tree expr, tree type,
const char *printname)
expand_start_case (tree index_expr)
{
struct nesting *thiscase = ALLOC_NESTING ();
@ -2771,13 +2709,10 @@ expand_start_case (int exit_flag, tree expr, tree type,
thiscase->next = case_stack;
thiscase->all = nesting_stack;
thiscase->depth = ++nesting_depth;
thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
thiscase->exit_label = 0;
thiscase->data.case_stmt.case_list = 0;
thiscase->data.case_stmt.index_expr = expr;
thiscase->data.case_stmt.nominal_type = type;
thiscase->data.case_stmt.index_expr = index_expr;
thiscase->data.case_stmt.default_label = 0;
thiscase->data.case_stmt.printname = printname;
thiscase->data.case_stmt.line_number_status = force_line_numbers ();
case_stack = thiscase;
nesting_stack = thiscase;
@ -2791,119 +2726,12 @@ expand_start_case (int exit_flag, tree expr, tree type,
thiscase->data.case_stmt.start = get_last_insn ();
}
/* Accumulate one case or default label inside a case or switch statement.
VALUE is the value of the case (a null pointer, for a default label).
The function CONVERTER, when applied to arguments T and V,
converts the value V to the type T.
If not currently inside a case or switch statement, return 1 and do
nothing. The caller will print a language-specific error message.
If VALUE is a duplicate or overlaps, return 2 and do nothing
except store the (first) duplicate node in *DUPLICATE.
If VALUE is out of range, return 3 and do nothing.
Return 0 on success.
Extended to handle range statements. */
int
pushcase (tree value, tree (*converter) (tree, tree), tree label,
tree *duplicate)
{
tree index_type;
tree nominal_type;
/* Fail if not inside a real case statement. */
if (! (case_stack && case_stack->data.case_stmt.start))
return 1;
index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
nominal_type = case_stack->data.case_stmt.nominal_type;
/* If the index is erroneous, avoid more problems: pretend to succeed. */
if (index_type == error_mark_node)
return 0;
/* Convert VALUE to the type in which the comparisons are nominally done. */
if (value != 0)
value = (*converter) (nominal_type, value);
/* Fail if this value is out of range for the actual type of the index
(which may be narrower than NOMINAL_TYPE). */
if (value != 0
&& (TREE_CONSTANT_OVERFLOW (value)
|| ! int_fits_type_p (value, index_type)))
return 3;
return add_case_node (value, value, label, duplicate, false);
}
/* Like pushcase but this case applies to all values between VALUE1 and
VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
starts at VALUE1 and ends at the highest value of the index type.
If both are NULL, this case applies to all values.
The return value is the same as that of pushcase but there is one
additional error code: 4 means the specified range was empty. */
int
pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
tree label, tree *duplicate)
{
tree index_type;
tree nominal_type;
/* Fail if not inside a real case statement. */
if (! (case_stack && case_stack->data.case_stmt.start))
return 1;
index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
nominal_type = case_stack->data.case_stmt.nominal_type;
/* If the index is erroneous, avoid more problems: pretend to succeed. */
if (index_type == error_mark_node)
return 0;
/* Convert VALUEs to type in which the comparisons are nominally done
and replace any unspecified value with the corresponding bound. */
if (value1 == 0)
value1 = TYPE_MIN_VALUE (index_type);
if (value2 == 0)
value2 = TYPE_MAX_VALUE (index_type);
/* Fail if the range is empty. Do this before any conversion since
we want to allow out-of-range empty ranges. */
if (value2 != 0 && tree_int_cst_lt (value2, value1))
return 4;
/* If the max was unbounded, use the max of the nominal_type we are
converting to. Do this after the < check above to suppress false
positives. */
if (value2 == 0)
value2 = TYPE_MAX_VALUE (nominal_type);
value1 = (*converter) (nominal_type, value1);
value2 = (*converter) (nominal_type, value2);
/* Fail if these values are out of range. */
if (TREE_CONSTANT_OVERFLOW (value1)
|| ! int_fits_type_p (value1, index_type))
return 3;
if (TREE_CONSTANT_OVERFLOW (value2)
|| ! int_fits_type_p (value2, index_type))
return 3;
return add_case_node (value1, value2, label, duplicate, false);
}
/* Do the actual insertion of a case label for pushcase and pushcase_range
into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
/* Do the insertion of a case label into
case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
slowdown for large switch statements. */
int
add_case_node (tree low, tree high, tree label, tree *duplicate,
bool dont_expand_label)
add_case_node (tree low, tree high, tree label, tree *duplicate)
{
struct case_node *p, **q, *r;
@ -2922,8 +2750,6 @@ add_case_node (tree low, tree high, tree label, tree *duplicate,
return 2;
}
case_stack->data.case_stmt.default_label = label;
if (!dont_expand_label)
expand_label (label);
return 0;
}
@ -2962,8 +2788,6 @@ add_case_node (tree low, tree high, tree label, tree *duplicate,
r->high = high;
r->code_label = label;
if (!dont_expand_label)
expand_label (label);
*q = r;
r->parent = p;
@ -3355,11 +3179,6 @@ expand_end_case_type (tree orig_index, tree orig_type)
thiscase->data.case_stmt.case_list
= case_tree2list (thiscase->data.case_stmt.case_list, 0);
/* Simplify the case-list before we count it. */
group_case_nodes (thiscase->data.case_stmt.case_list);
strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
default_label);
/* Get upper and lower bounds of case values.
Also convert all the case values to the index expr's data type. */
@ -3534,7 +3353,7 @@ expand_end_case_type (tree orig_index, tree orig_type)
balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
emit_case_nodes (index, thiscase->data.case_stmt.case_list,
default_label, index_type);
emit_jump_if_reachable (default_label);
emit_jump (default_label);
}
}
else
@ -3543,7 +3362,7 @@ expand_end_case_type (tree orig_index, tree orig_type)
if (! try_casesi (index_type, index_expr, minval, range,
table_label, default_label))
{
index_type = thiscase->data.case_stmt.nominal_type;
index_type = integer_type_node;
/* Index jumptables from zero for suitable values of
minval to avoid a subtraction. */
@ -3745,100 +3564,16 @@ estimate_case_costs (case_node_ptr node)
return 1;
}
/* Determine whether two case labels branch to the same target. */
/* Determine whether two case labels branch to the same target.
Since we now do tree optimizations, just comparing labels is
good enough. */
static bool
same_case_target_p (rtx l1, rtx l2)
{
#if 0
rtx i1, i2;
if (l1 == l2)
return true;
i1 = next_real_insn (l1);
i2 = next_real_insn (l2);
if (i1 == i2)
return true;
if (i1 && simplejump_p (i1))
{
l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
}
if (i2 && simplejump_p (i2))
{
l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
}
#endif
/* When coming from gimple, we usually won't have emitted either
the labels or the body of the switch statement. The job being
done here should be done via jump threading at the tree level.
Cases that go the same place should have the same label. */
return l1 == l2;
}
/* Delete nodes that branch to the default label from a list of
case nodes. Eg. case 5: default: becomes just default: */
static void
strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
{
case_node_ptr ptr;
while (*prev)
{
ptr = *prev;
if (same_case_target_p (label_rtx (ptr->code_label), deflab))
*prev = ptr->right;
else
prev = &ptr->right;
}
}
/* Scan an ordered list of case nodes
combining those with consecutive values or ranges.
Eg. three separate entries 1: 2: 3: become one entry 1..3: */
static void
group_case_nodes (case_node_ptr head)
{
case_node_ptr node = head;
while (node)
{
rtx lab;
case_node_ptr np = node;
lab = label_rtx (node->code_label);
/* Try to group the successors of NODE with NODE. */
while (((np = np->right) != 0)
/* Do they jump to the same place? */
&& same_case_target_p (label_rtx (np->code_label), lab)
/* Are their ranges consecutive? */
&& tree_int_cst_equal (np->low,
fold (build (PLUS_EXPR,
TREE_TYPE (node->high),
node->high,
integer_one_node)))
/* An overflow is not consecutive. */
&& tree_int_cst_lt (node->high,
fold (build (PLUS_EXPR,
TREE_TYPE (node->high),
node->high,
integer_one_node))))
{
node->high = np->high;
}
/* NP is the first node after NODE which can't be grouped with it.
Delete the nodes in between, and move on to that node. */
node->right = np;
node = np;
}
}
/* Take an ordered list of case nodes
and transform them into a near optimal binary tree,
on the assumption that any target code selection value is as
@ -4061,15 +3796,6 @@ node_is_bounded (case_node_ptr node, tree index_type)
return (node_has_low_bound (node, index_type)
&& node_has_high_bound (node, index_type));
}
/* Emit an unconditional jump to LABEL unless it would be dead code. */
static void
emit_jump_if_reachable (rtx label)
{
if (!BARRIER_P (get_last_insn ()))
emit_jump (label);
}
/* Emit step-by-step code to select a case for the value of INDEX.
The thus generated decision tree follows the form of the
@ -4215,7 +3941,7 @@ emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
emit_case_nodes (index, node->left, default_label, index_type);
/* If left-hand subtree does nothing,
go to default. */
emit_jump_if_reachable (default_label);
emit_jump (default_label);
/* Code branches here for the right-hand subtree. */
expand_label (test_label);
@ -4356,7 +4082,7 @@ emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
{
/* If the left-hand subtree fell through,
don't let it fall into the right-hand subtree. */
emit_jump_if_reachable (default_label);
emit_jump (default_label);
expand_label (test_label);
emit_case_nodes (index, node->right, default_label, index_type);


@ -1926,8 +1926,7 @@ backend_init (void)
/* Enable line number info for traceback. */
|| debug_info_level > DINFO_LEVEL_NONE
#endif
|| flag_test_coverage
|| warn_notreached);
|| flag_test_coverage);
init_regs ();
init_fake_stack_mems ();


@ -99,8 +99,6 @@ static void tree_cfg2vcg (FILE *);
static void tree_merge_blocks (basic_block, basic_block);
static bool tree_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static void group_case_labels (void);
static void cleanup_dead_labels (void);
static bool cleanup_control_flow (void);
static bool cleanup_control_expr_graph (basic_block, block_stmt_iterator);
static edge find_taken_edge_cond_expr (basic_block, tree);
@ -769,7 +767,16 @@ update_eh_label (struct eh_region *region)
tree old_label = get_eh_region_tree_label (region);
if (old_label)
{
tree new_label = label_for_bb[label_to_block (old_label)->index];
tree new_label;
basic_block bb = label_to_block (old_label);
/* ??? After optimizing, there may be EH regions with labels
that have already been removed from the function body, so
there is no basic block for them. */
if (! bb)
return;
new_label = label_for_bb[bb->index];
set_eh_region_tree_label (region, new_label);
}
}
@ -791,7 +798,7 @@ main_block_label (tree label)
2) Redirect all references to labels to the leading labels.
3) Cleanup all useless labels. */
static void
void
cleanup_dead_labels (void)
{
basic_block bb;
@ -924,7 +931,7 @@ cleanup_dead_labels (void)
same label.
Eg. three separate entries 1: 2: 3: become one entry 1..3: */
static void
void
group_case_labels (void)
{
basic_block bb;


@ -37,8 +37,15 @@ Boston, MA 02111-1307, USA. */
#include "langhooks.h"
#include "ggc.h"
/* HACK */
extern int using_eh_for_cleanups_p;
/* Nonzero if we are using EH to handle cleanups. */
static int using_eh_for_cleanups_p = 0;
void
using_eh_for_cleanups (void)
{
using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file. */
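
A purely illustrative note: with the flag now static in tree-eh.c, code outside this file that wants cleanups lowered to EH regions goes through the exported setter declared in tree.h.  A hypothetical caller:

  /* Hypothetical front-end initialization hook; the only real call here
     is using_eh_for_cleanups (), everything else is a placeholder.  */
  static void
  frontend_init_exceptions (void)
  {
    using_eh_for_cleanups ();
  }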


@ -473,6 +473,8 @@ extern void dump_cfg_stats (FILE *);
extern void debug_cfg_stats (void);
extern void debug_loop_ir (void);
extern void print_loop_ir (FILE *);
extern void cleanup_dead_labels (void);
extern void group_case_labels (void);
extern void cleanup_tree_cfg (void);
extern tree first_stmt (basic_block);
extern tree last_stmt (basic_block);


@ -101,6 +101,35 @@ static struct tree_opt_pass pass_all_optimizations =
0 /* todo_flags_finish */
};
/* Pass: cleanup the CFG just before expanding trees to RTL.
This is just a round of label cleanups and case node grouping
because after the tree optimizers have run such cleanups may
be necessary. */
static void
execute_cleanup_cfg_post_optimizing (void)
{
cleanup_tree_cfg ();
cleanup_dead_labels ();
group_case_labels ();
}
static struct tree_opt_pass pass_cleanup_cfg_post_optimizing =
{
NULL, /* name */
NULL, /* gate */
execute_cleanup_cfg_post_optimizing, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
0, /* tv_id */
PROP_cfg, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
0 /* todo_flags_finish */
};
/* Pass: do the actions required to finish with tree-ssa optimization
passes. */
@ -324,6 +353,7 @@ init_tree_optimization_passes (void)
NEXT_PASS (pass_del_ssa);
NEXT_PASS (pass_nrv);
NEXT_PASS (pass_remove_useless_vars);
NEXT_PASS (pass_cleanup_cfg_post_optimizing);
*p = NULL;
p = &pass_loop.sub;


@ -3352,12 +3352,12 @@ extern void warn_about_unused_variables (tree);
extern int is_body_block (tree);
extern struct nesting * current_nesting_level (void);
extern void expand_start_case (int, tree, tree, const char *);
extern void expand_start_case (tree);
extern void expand_end_case_type (tree, tree);
#define expand_end_case(cond) expand_end_case_type (cond, NULL)
extern int add_case_node (tree, tree, tree, tree *, bool);
extern int pushcase (tree, tree (*) (tree, tree), tree, tree *);
extern int pushcase_range (tree, tree, tree (*) (tree, tree), tree, tree *);
extern int add_case_node (tree, tree, tree, tree *);
/* In tree-eh.c */
extern void using_eh_for_cleanups (void);
/* In fold-const.c */
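
For quick reference, a minimal sketch of a caller of the trimmed switch-expansion interface declared above; cond, low, high and label are placeholders, not code from this commit:

  tree duplicate = 0;

  expand_start_case (cond);      /* formerly took an exit flag, a type and a name */
  add_case_node (low, high, label, &duplicate);   /* no dont_expand_label flag anymore */
  if (duplicate)
    abort ();
  expand_end_case (cond);        /* i.e. expand_end_case_type (cond, NULL) */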