Revert 2004-09-09 Giovanni Bajo <giovannibajo@gcc.gnu.org>
	* lambda-mat.c (lambda_matrix_inverse_hard): Use gcc_assert and
	gcc_unreachable instead of abort.
	* lambda.h (lambda_vector_min_nz): Likewise.
	* langhooks.c (lhd_set_decl_assembler_name, lhd_can_use_bit_fields_p,
	lhd_incomplete_type_error, lhd_expand_expr, lhd_types_compatible_p,
	lhd_gimplify_expr): Likewise.
	* lcm.c (optimize_mode_switching): Likewise.
	* local-alloc.c (update_equiv_regs, block_alloc, find_free_reg):
	Likewise.
	* loop-doloop.c (doloop_modify): Likewise.
	* loop-invariant.c (record_use): Likewise.
	* loop-iv.c (get_biv_step_1, get_biv_step, iv_analyze, get_iv_value,
	canon_condition, simplify_using_condition,
	simplify_using_initial_values, shorten_into_mode,
	canonicalize_iv_subregs, iv_number_of_iterations): Likewise.
	* loop-unroll.c (unroll_and_peel_loops, peel_loop_completely,
	unroll_loop_constant_iterations, unroll_loop_runtime_iterations,
	peel_loop_simple, unroll_loop_stupid): Likewise.
	* loop-unswitch.c (compare_and_jump_seq, unswitch_single_loop,
	unswitch_loop): Likewise.
	* loop.c (gen_prefetch, loop_optimize, rtx_equal_for_loop_p,
	move_movables, replace_call_address, find_and_verify_loops,
	rtx_equal_for_prefetch_p, record_giv, general_induction_var,
	simplify_giv_expr, check_ext_dependent_givs, try_copy_prop,
	loop_giv_dump): Likewise.
	* loop.h (INSN_LUID): Likewise.
	* modulo-sched.c (normalize_sched_times, generate_prolog_epilog,
	sms_schedule, check_nodes_order): Likewise.
	* optabs.c (gen_conditional, add_equal_note, expand_simple_binop,
	expand_twoval_unop, expand_twoval_binop, expand_twoval_binop_libfunc,
	expand_simple_unop, emit_no_conflict_block, prepare_cmp_insn,
	emit_cmp_and_jump_insn_1, emit_cmp_and_jump_insns,
	prepare_float_lib_cmp, gen_add2_insn, have_add2_insn, gen_sub2_insn,
	have_sub2_insn, expand_float, expand_fix, debug_optab_libfuncs):
	Likewise.
	* opts.c (common_handle_option): Likewise.
	* params.c (set_param_value): Likewise.
	* passes.c (open_dump_file, rest_of_handle_final): Likewise.
	* postreload-gcse.c (expr_equiv_p, oprs_unchanged_p, hash_scan_set,
	reg_set_between_after_reload_p, reg_used_between_after_reload_p,
	get_avail_load_store_reg, eliminate_partially_redundant_load):
	Likewise.
	* postreload.c (reload_cse_simplify_set, reload_combine_note_use):
	Likewise.
	* predict.c (tree_predicted_by_p, expected_value_to_br_prob,
	propagate_freq, expensive_function_p): Likewise.
	* pretty-print.c (pp_base_format_text): Likewise.
	* profile.c (instrument_edges, instrument_values,
	compute_branch_probabilities, branch_prob, union_groups,
	end_branch_prob, tree_register_profile_hooks): Likewise.

From-SVN: r87290
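The pattern being reverted is mechanical: the reverted commit had rewritten explicit `if (...) abort ();` checks as `gcc_assert`/`gcc_unreachable` calls, and this commit puts the abort-style checks back. The sketch below is not taken from either patch; it is a minimal C illustration of the two styles, using stand-in macro definitions (GCC's real ones live in system.h and depend on the checking configuration) and a hypothetical helper loosely modeled on lambda_vector_min_nz.

```c
#include <stdlib.h>

/* Stand-ins for GCC's macros from system.h, shown only so the example is
   self-contained; the real definitions honor --enable-checking.  */
#define gcc_assert(EXPR) ((EXPR) ? (void) 0 : abort ())
#define gcc_unreachable() (abort ())   /* replaces "default: abort ();"  */

/* Style introduced by the reverted commit: state the invariant directly.  */
static int
first_nonzero_new (const int *vec, int n, int start)
{
  int j, min = -1;

  gcc_assert (start <= n);
  for (j = start; j < n; j++)
    if (vec[j])
      {
	min = j;
	break;
      }
  gcc_assert (min >= 0);   /* callers must supply a nonzero element  */
  return min;
}

/* Style restored by this revert: explicit test followed by abort ().  */
static int
first_nonzero_old (const int *vec, int n, int start)
{
  int j, min = -1;

  if (start > n)
    abort ();
  for (j = start; j < n; j++)
    if (vec[j])
      {
	min = j;
	break;
      }
  if (min < 0)
    abort ();
  return min;
}
```

Both forms trap on the same conditions; the revert carries no functional change beyond restoring the older abort ()-based spelling of these checks in the files listed above.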
commit 8127d0e073
parent 108ebf88b2
@@ -10,85 +10,6 @@
	tree-ssa-threadupdate.c, tree-vectorizer.c, tree-vectorizer.h,
	value-prof.c: Fix comment typos. Follow spelling conventions.

2004-09-10 Giovanni Bajo <giovannibajo@gcc.gnu.org>

	* unroll.c (unroll_loop, calculate_giv_inc, initial_reg_note_copy,
	final_reg_note_copy, copy_loop_body, fold_rtx_mult_add,
	final_giv_value, loop_iterations): Replace abort with gcc_assert and
	gcc_unreachable.
	* value-prof.c (rtl_find_values_to_profile,
	rtl_register_value_prof_hooks, tree_find_values_to_profile,
	tree_value_profile_transformations, tree_register_value_prof_hooks):
	Likewise.
	* varasm.c (named_section_flags, named_section, make_decl_rtl,
	asm_emit_uninitialised, assemble_external, assemble_integer,
	decode_addr_const, const_hash_1, compare_constant, copy_constant,
	force_const_mem, output_constant_pool_2, output_constant_pool_1,
	output_constant, output_constructor, supports_one_only,
	make_decl_one_only, decl_tls_model, default_no_named_section,
	default_elf_select_section_1, default_unique_section_1): Likewise.
	* varray.c (varray_copy): Likewise.
	* var-tracking.c (stack_adjust_offset_pre_post, variable_htab_free,
	variable_union, dataflow_set_different_2, count_uses,
	variable_was_changed, set_frame_base_location, set_variable_part,
	emit_note_insn_var_location, vt_emit_notes,
	vt_add_function_parameters): Likewise.
	* vec.c (vec_gc_o_reserve): Likewise.
	* vmsdbgout.c (addr_const_to_string): Likewise.

2004-09-10 Giovanni Bajo <giovannibajo@gcc.gnu.org>

	* lambda-mat.c (lambda_matrix_inverse_hard): Use gcc_assert
	and gcc_unreachable instead of abort.
	* lambda.h (lambda_vector_min_nz): Likewise.
	* langhooks.c lhd_set_decl_assembler_name, lhd_can_use_bit_fields_p,
	lhd_incomplete_type_error, lhd_expand_expr, lhd_types_compatible_p,
	lhd_gimplify_expr): Likewise.
	* lcm.c (optimize_mode_switching): Likewise.
	* local-alloc.c (update_equiv_regs, block_alloc, find_free_reg):
	Likewise.
	* loop-doloop.c (doloop_modify): Likewise.
	* loop-invariant.c (record_use): Likewise.
	* loop-iv.c (get_biv_step_1, get_biv_step, iv_analyze, get_iv_value,
	canon_condition, simplify_using_condition,
	simplify_using_initial_values, shorten_into_mode,
	canonicalize_iv_subregs, iv_number_of_iterations): Likewise.
	* loop-unroll.c (unroll_and_peel_loops, peel_loop_completely,
	unroll_loop_constant_iterations, unroll_loop_runtime_iterations,
	peel_loop_simple, unroll_loop_stupid): Likewise.
	* loop-unswitch.c (compare_and_jump_seq, unswitch_single_loop,
	unswitch_loop): Likewise.
	* loop.c (gen_prefetch, loop_optimize, rtx_equal_for_loop_p,
	move_movables, replace_call_address, find_and_verify_loops,
	rtx_equal_for_prefetch_p, record_giv, general_induction_var,
	simplify_giv_expr, check_ext_dependent_givs, try_copy_prop,
	loop_giv_dump): Likewise.
	* loop.h (INSN_LUID): Likewise.
	* modulo-sched.c (normalize_sched_times, generate_prolog_epilog,
	sms_schedule, check_nodes_order): Likewise.
	* optabs.c (gen_conditional, add_equal_note, expand_simple_binop,
	expand_twoval_unop, expand_twoval_binop, expand_twoval_binop_libfunc,
	expand_simple_unop, emit_no_conflict_block, prepare_cmp_insn,
	emit_cmp_and_jump_insn_1, emit_cmp_and_jump_insns,
	prepare_float_lib_cmp, gen_add2_insn, have_add2_insn, gen_sub2_insn,
	have_sub2_insn, expand_float, expand_fix, debug_optab_libfuncs):
	Likewise.
	* opts.c (common_handle_option): Likewise.
	* params.c (set_param_value): Likewise.
	* passes.c (open_dump_file, rest_of_handle_final): Likewise.
	* postreload-gcse.c (expr_equiv_p, oprs_unchanged_p,hash_scan_set,
	reg_set_between_after_reload_p, reg_used_between_after_reload_p,
	get_avail_load_store_reg, eliminate_partially_redundant_load):
	Likewise.
	* postreload.c (reload_cse_simplify_set, reload_combine_note_use):
	Likewise.
	* predict.c (tree_predicted_by_p, expected_value_to_br_prob,
	propagate_freq, expensive_function_p): Likewise.
	* pretty-print.c (pp_base_format_text)
	* profile.c (instrument_edges, instrument_values,
	compute_branch_probabilities, branch_prob, union_groups,
	end_branch_prob, tree_register_profile_hooks): Likewise.

2004-09-10 Jan Hubicka <jh@suse.cz>

	* tree-ssa-dce.c (remove_dead_stmt): Update profile.
@@ -379,7 +379,8 @@ lambda_matrix_inverse_hard (lambda_matrix mat, lambda_matrix inv, int n)
      diagonal = row[j];

      /* If the matrix is singular, abort.  */
      gcc_assert (diagonal != 0);
      if (diagonal == 0)
	abort ();

      determinant = determinant * diagonal;
@@ -290,7 +290,10 @@ lambda_vector_min_nz (lambda_vector vec1, int n, int start)
{
  int j;
  int min = -1;
  gcc_assert (start <= n);
#ifdef ENABLE_CHECKING
  if (start > n)
    abort ();
#endif
  for (j = start; j < n; j++)
    {
      if (vec1[j])
@@ -298,7 +301,8 @@ lambda_vector_min_nz (lambda_vector vec1, int n, int start)
	min = j;
    }

  gcc_assert (min >= 0);
  if (min < 0)
    abort ();

  return min;
}
@ -156,35 +156,38 @@ lhd_set_decl_assembler_name (tree decl)
|
||||
/* The language-independent code should never use the
|
||||
DECL_ASSEMBLER_NAME for lots of DECLs. Only FUNCTION_DECLs and
|
||||
VAR_DECLs for variables with static storage duration need a real
|
||||
DECL_ASSEMBLER_NAME.
|
||||
Nobody should ever be asking for the DECL_ASSEMBLER_NAME of
|
||||
these DECLs -- unless they're in language-dependent code, in
|
||||
which case set_decl_assembler_name hook should handle things. */
|
||||
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
|
||||
|| (TREE_CODE (decl) == VAR_DECL
|
||||
&& (TREE_STATIC (decl)
|
||||
|| DECL_EXTERNAL (decl)
|
||||
|| TREE_PUBLIC (decl))));
|
||||
|
||||
/* By default, assume the name to use in assembly code is the
|
||||
same as that used in the source language. (That's correct
|
||||
for C, and GCC used to set DECL_ASSEMBLER_NAME to the same
|
||||
value as DECL_NAME in build_decl, so this choice provides
|
||||
backwards compatibility with existing front-ends.
|
||||
|
||||
Can't use just the variable's own name for a variable whose
|
||||
scope is less than the whole compilation. Concatenate a
|
||||
distinguishing number - we use the DECL_UID. */
|
||||
if (TREE_PUBLIC (decl) || DECL_CONTEXT (decl) == NULL_TREE)
|
||||
SET_DECL_ASSEMBLER_NAME (decl, DECL_NAME (decl));
|
||||
else
|
||||
DECL_ASSEMBLER_NAME. */
|
||||
if (TREE_CODE (decl) == FUNCTION_DECL
|
||||
|| (TREE_CODE (decl) == VAR_DECL
|
||||
&& (TREE_STATIC (decl)
|
||||
|| DECL_EXTERNAL (decl)
|
||||
|| TREE_PUBLIC (decl))))
|
||||
{
|
||||
const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
|
||||
char *label;
|
||||
/* By default, assume the name to use in assembly code is the
|
||||
same as that used in the source language. (That's correct
|
||||
for C, and GCC used to set DECL_ASSEMBLER_NAME to the same
|
||||
value as DECL_NAME in build_decl, so this choice provides
|
||||
backwards compatibility with existing front-ends.
|
||||
|
||||
ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
|
||||
SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
|
||||
Can't use just the variable's own name for a variable whose
|
||||
scope is less than the whole compilation. Concatenate a
|
||||
distinguishing number - we use the DECL_UID. */
|
||||
if (TREE_PUBLIC (decl) || DECL_CONTEXT (decl) == NULL_TREE)
|
||||
SET_DECL_ASSEMBLER_NAME (decl, DECL_NAME (decl));
|
||||
else
|
||||
{
|
||||
const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
|
||||
char *label;
|
||||
|
||||
ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
|
||||
SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
|
||||
}
|
||||
}
|
||||
else
|
||||
/* Nobody should ever be asking for the DECL_ASSEMBLER_NAME of
|
||||
these DECLs -- unless they're in language-dependent code, in
|
||||
which case set_decl_assembler_name hook should handle things. */
|
||||
abort ();
|
||||
}
|
||||
|
||||
/* By default we always allow bit-field based optimizations. */
|
||||
@ -198,7 +201,7 @@ lhd_can_use_bit_fields_p (void)
|
||||
tree
|
||||
lhd_type_promotes_to (tree ARG_UNUSED (type))
|
||||
{
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
|
||||
/* Registration of machine- or os-specific builtin types. */
|
||||
@ -212,7 +215,10 @@ lhd_register_builtin_type (tree ARG_UNUSED (type),
|
||||
void
|
||||
lhd_incomplete_type_error (tree ARG_UNUSED (value), tree type)
|
||||
{
|
||||
gcc_assert (TREE_CODE (type) == ERROR_MARK);
|
||||
if (TREE_CODE (type) == ERROR_MARK)
|
||||
return;
|
||||
|
||||
abort ();
|
||||
}
|
||||
|
||||
/* Provide a default routine for alias sets that always returns -1. This
|
||||
@ -241,7 +247,7 @@ lhd_expand_expr (tree ARG_UNUSED (t), rtx ARG_UNUSED (r),
|
||||
int ARG_UNUSED (em),
|
||||
rtx * ARG_UNUSED (a))
|
||||
{
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
|
||||
/* The default language-specific function for expanding a decl. After
|
||||
@ -281,7 +287,7 @@ lhd_types_compatible_p (tree x, tree y)
|
||||
information associated to common tree codes. If a tree node is
|
||||
completely handled within this function, it should set *SUBTREES to
|
||||
0, so that generic handling isn't attempted. For language-specific
|
||||
tree codes, generic handling would assert out, so make sure it is set
|
||||
tree codes, generic handling would abort(), so make sure it is set
|
||||
properly. Both SUBTREES and *SUBTREES is guaranteed to be nonzero
|
||||
when the function is called. */
|
||||
|
||||
@ -436,7 +442,8 @@ lhd_gimplify_expr (tree *expr_p ATTRIBUTE_UNUSED, tree *pre_p ATTRIBUTE_UNUSED,
|
||||
size_t
|
||||
lhd_tree_size (enum tree_code c ATTRIBUTE_UNUSED)
|
||||
{
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Return true if decl, which is a function decl, may be called by a
|
||||
|
gcc/lcm.c
@@ -1029,7 +1029,8 @@ optimize_mode_switching (FILE *file)
|
||||
{
|
||||
regset live_at_end = eg->src->global_live_at_end;
|
||||
|
||||
gcc_assert (!pre_exit);
|
||||
if (pre_exit)
|
||||
abort ();
|
||||
pre_exit = split_edge (eg);
|
||||
COPY_REG_SET (pre_exit->global_live_at_start, live_at_end);
|
||||
COPY_REG_SET (pre_exit->global_live_at_end, live_at_end);
|
||||
@ -1207,22 +1208,21 @@ optimize_mode_switching (FILE *file)
|
||||
emited = true;
|
||||
if (JUMP_P (BB_END (src_bb)))
|
||||
emit_insn_before (mode_set, BB_END (src_bb));
|
||||
/* It doesn't make sense to switch to normal mode
|
||||
after a CALL_INSN, so we're going to abort if we
|
||||
find one. The cases in which a CALL_INSN may
|
||||
have an abnormal edge are sibcalls and EH edges.
|
||||
In the case of sibcalls, the dest basic-block is
|
||||
the EXIT_BLOCK, that runs in normal mode; it is
|
||||
assumed that a sibcall insn requires normal mode
|
||||
itself, so no mode switch would be required after
|
||||
the call (it wouldn't make sense, anyway). In
|
||||
the case of EH edges, EH entry points also start
|
||||
in normal mode, so a similar reasoning applies. */
|
||||
else if (NONJUMP_INSN_P (BB_END (src_bb)))
|
||||
emit_insn_after (mode_set, BB_END (src_bb));
|
||||
else
|
||||
{
|
||||
/* It doesn't make sense to switch to normal mode
|
||||
after a CALL_INSN, so we're going to abort if we
|
||||
find one. The cases in which a CALL_INSN may
|
||||
have an abnormal edge are sibcalls and EH edges.
|
||||
In the case of sibcalls, the dest basic-block is
|
||||
the EXIT_BLOCK, that runs in normal mode; it is
|
||||
assumed that a sibcall insn requires normal mode
|
||||
itself, so no mode switch would be required after
|
||||
the call (it wouldn't make sense, anyway). In
|
||||
the case of EH edges, EH entry points also start
|
||||
in normal mode, so a similar reasoning applies. */
|
||||
gcc_assert (NONJUMP_INSN_P (BB_END (src_bb)));
|
||||
emit_insn_after (mode_set, BB_END (src_bb));
|
||||
}
|
||||
abort ();
|
||||
bb_info[j][src_bb->index].computing = mode;
|
||||
RESET_BIT (transp[src_bb->index], j);
|
||||
}
|
||||
|
@ -1028,9 +1028,9 @@ update_equiv_regs (void)
|
||||
once and used once. (If it were only set, but not used,
|
||||
flow would have deleted the setting insns.) Hence
|
||||
there can only be one insn in reg_equiv[REGNO].init_insns. */
|
||||
gcc_assert (reg_equiv[regno].init_insns != NULL_RTX);
|
||||
gcc_assert (XEXP (reg_equiv[regno].init_insns, 1)
|
||||
== NULL_RTX);
|
||||
if (reg_equiv[regno].init_insns == NULL_RTX
|
||||
|| XEXP (reg_equiv[regno].init_insns, 1) != NULL_RTX)
|
||||
abort ();
|
||||
equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);
|
||||
|
||||
/* We may not move instructions that can throw, since
|
||||
@ -1188,10 +1188,8 @@ block_alloc (int b)
|
||||
while (1)
|
||||
{
|
||||
if (!NOTE_P (insn))
|
||||
{
|
||||
++insn_count;
|
||||
gcc_assert (insn_count <= max_uid);
|
||||
}
|
||||
if (++insn_count > max_uid)
|
||||
abort ();
|
||||
if (insn == BB_HEAD (BASIC_BLOCK (b)))
|
||||
break;
|
||||
insn = PREV_INSN (insn);
|
||||
@ -2112,8 +2110,8 @@ find_free_reg (enum reg_class class, enum machine_mode mode, int qtyno,
|
||||
#endif
|
||||
|
||||
/* Validate our parameters. */
|
||||
gcc_assert (born_index >= 0);
|
||||
gcc_assert (born_index < dead_index);
|
||||
if (born_index < 0 || born_index > dead_index)
|
||||
abort ();
|
||||
|
||||
/* Don't let a pseudo live in a reg across a function call
|
||||
if we might get a nonlocal goto. */
|
||||
|
@@ -303,16 +303,16 @@ doloop_modify (struct loop *loop, struct niter_desc *desc,
	  increment_count = true;
	  noloop = const1_rtx;
	}
      else if (XEXP (condition, 1) == const0_rtx)
	noloop = const0_rtx;
      else
	{
	  gcc_assert (XEXP (condition, 1) == const0_rtx);
	  noloop = const0_rtx;
	}
	abort ();
      break;

    case GE:
      /* Currently only GE tests against zero are supported.  */
      gcc_assert (XEXP (condition, 1) == const0_rtx);
      if (XEXP (condition, 1) != const0_rtx)
	abort ();

      noloop = constm1_rtx;

@@ -327,9 +327,9 @@ doloop_modify (struct loop *loop, struct niter_desc *desc,
      nonneg = 1;
      break;

    default:
      /* Abort if an invalid doloop pattern has been generated.  */
      gcc_unreachable ();
    default:
      abort ();
    }

  if (increment_count)
@@ -353,7 +353,8 @@ record_use (struct def *def, rtx *use, rtx insn)

  if (GET_CODE (*use) == SUBREG)
    use = &SUBREG_REG (*use);
  gcc_assert (REG_P (*use));
  if (!REG_P (*use))
    abort ();

  u->pos = use;
  u->insn = insn;
@ -793,15 +793,16 @@ get_biv_step_1 (rtx insn, rtx reg,
|
||||
|
||||
case SIGN_EXTEND:
|
||||
case ZERO_EXTEND:
|
||||
gcc_assert (GET_MODE (op0) == *inner_mode);
|
||||
gcc_assert (*extend == UNKNOWN);
|
||||
gcc_assert (*outer_step == const0_rtx);
|
||||
if (GET_MODE (op0) != *inner_mode
|
||||
|| *extend != UNKNOWN
|
||||
|| *outer_step != const0_rtx)
|
||||
abort ();
|
||||
|
||||
*extend = code;
|
||||
break;
|
||||
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
|
||||
return true;
|
||||
@ -825,9 +826,17 @@ get_biv_step (rtx reg, rtx *inner_step, enum machine_mode *inner_mode,
|
||||
outer_step))
|
||||
return false;
|
||||
|
||||
gcc_assert (*inner_mode == *outer_mode || *extend != UNKNOWN);
|
||||
gcc_assert (*inner_mode != *outer_mode || *extend == UNKNOWN);
|
||||
gcc_assert (*inner_mode != *outer_mode || *outer_step == const0_rtx);
|
||||
if (*inner_mode != *outer_mode
|
||||
&& *extend == UNKNOWN)
|
||||
abort ();
|
||||
|
||||
if (*inner_mode == *outer_mode
|
||||
&& *extend != UNKNOWN)
|
||||
abort ();
|
||||
|
||||
if (*inner_mode == *outer_mode
|
||||
&& *outer_step != const0_rtx)
|
||||
abort ();
|
||||
|
||||
return true;
|
||||
}
|
||||
@ -1076,7 +1085,8 @@ iv_analyze (rtx insn, rtx def, struct rtx_iv *iv)
|
||||
mby = XEXP (rhs, 1);
|
||||
if (!CONSTANT_P (mby))
|
||||
{
|
||||
gcc_assert (CONSTANT_P (op0));
|
||||
if (!CONSTANT_P (op0))
|
||||
abort ();
|
||||
tmp = op0;
|
||||
op0 = mby;
|
||||
mby = tmp;
|
||||
@ -1084,13 +1094,14 @@ iv_analyze (rtx insn, rtx def, struct rtx_iv *iv)
|
||||
break;
|
||||
|
||||
case ASHIFT:
|
||||
gcc_assert (!CONSTANT_P (XEXP (rhs, 0)));
|
||||
if (CONSTANT_P (XEXP (rhs, 0)))
|
||||
abort ();
|
||||
op0 = XEXP (rhs, 0);
|
||||
mby = XEXP (rhs, 1);
|
||||
break;
|
||||
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
|
||||
amode = GET_MODE (rhs);
|
||||
@ -1181,7 +1192,8 @@ get_iv_value (struct rtx_iv *iv, rtx iteration)
|
||||
|
||||
/* We would need to generate some if_then_else patterns, and so far
|
||||
it is not needed anywhere. */
|
||||
gcc_assert (!iv->first_special);
|
||||
if (iv->first_special)
|
||||
abort ();
|
||||
|
||||
if (iv->step != const0_rtx && iteration != const0_rtx)
|
||||
val = simplify_gen_binary (PLUS, iv->extend_mode, iv->base,
|
||||
@ -1517,7 +1529,8 @@ canon_condition (rtx cond)
|
||||
mode = GET_MODE (op0);
|
||||
if (mode == VOIDmode)
|
||||
mode = GET_MODE (op1);
|
||||
gcc_assert (mode != VOIDmode);
|
||||
if (mode == VOIDmode)
|
||||
abort ();
|
||||
|
||||
if (GET_CODE (op1) == CONST_INT
|
||||
&& GET_MODE_CLASS (mode) != MODE_CC
|
||||
@ -1646,21 +1659,20 @@ simplify_using_condition (rtx cond, rtx *expr, regset altered)
|
||||
static void
|
||||
eliminate_implied_condition (enum rtx_code op, rtx a, rtx *b)
|
||||
{
|
||||
switch (op)
|
||||
if (op == AND)
|
||||
{
|
||||
case AND:
|
||||
/* If A implies *B, we may replace *B by true. */
|
||||
if (implies_p (a, *b))
|
||||
*b = const_true_rtx;
|
||||
break;
|
||||
case IOR:
|
||||
}
|
||||
else if (op == IOR)
|
||||
{
|
||||
/* If *B implies A, we may replace *B by false. */
|
||||
if (implies_p (*b, a))
|
||||
*b = const0_rtx;
|
||||
break;
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
}
|
||||
else
|
||||
abort ();
|
||||
}
|
||||
|
||||
/* Eliminates the conditions in TAIL that are implied by HEAD. OP is the
|
||||
@ -1702,19 +1714,18 @@ simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
|
||||
|
||||
eliminate_implied_conditions (op, &head, tail);
|
||||
|
||||
switch (op)
|
||||
if (op == AND)
|
||||
{
|
||||
case AND:
|
||||
neutral = const_true_rtx;
|
||||
aggr = const0_rtx;
|
||||
break;
|
||||
case IOR:
|
||||
}
|
||||
else if (op == IOR)
|
||||
{
|
||||
neutral = const0_rtx;
|
||||
aggr = const_true_rtx;
|
||||
break;
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
}
|
||||
else
|
||||
abort ();
|
||||
|
||||
simplify_using_initial_values (loop, UNKNOWN, &head);
|
||||
if (head == aggr)
|
||||
@ -1742,7 +1753,8 @@ simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
|
||||
return;
|
||||
}
|
||||
|
||||
gcc_assert (op == UNKNOWN);
|
||||
if (op != UNKNOWN)
|
||||
abort ();
|
||||
|
||||
e = loop_preheader_edge (loop);
|
||||
if (e->src == ENTRY_BLOCK_PTR)
|
||||
@ -1844,7 +1856,7 @@ shorten_into_mode (struct rtx_iv *iv, enum machine_mode mode,
|
||||
break;
|
||||
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
|
||||
iv->mode = mode;
|
||||
@ -1902,7 +1914,7 @@ canonicalize_iv_subregs (struct rtx_iv *iv0, struct rtx_iv *iv1,
|
||||
break;
|
||||
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
|
||||
/* Values of both variables should be computed in the same mode. These
|
||||
@ -2001,13 +2013,15 @@ iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
|
||||
desc->niter_max = 0;
|
||||
|
||||
cond = GET_CODE (condition);
|
||||
gcc_assert (COMPARISON_P (condition));
|
||||
if (!COMPARISON_P (condition))
|
||||
abort ();
|
||||
|
||||
mode = GET_MODE (XEXP (condition, 0));
|
||||
if (mode == VOIDmode)
|
||||
mode = GET_MODE (XEXP (condition, 1));
|
||||
/* The constant comparisons should be folded. */
|
||||
gcc_assert (mode != VOIDmode);
|
||||
if (mode == VOIDmode)
|
||||
abort ();
|
||||
|
||||
/* We only handle integers or pointers. */
|
||||
if (GET_MODE_CLASS (mode) != MODE_INT
|
||||
|
@ -116,7 +116,7 @@ unroll_and_peel_loops (struct loops *loops, int flags)
|
||||
{
|
||||
case LPT_PEEL_COMPLETELY:
|
||||
/* Already done. */
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
case LPT_PEEL_SIMPLE:
|
||||
peel_loop_simple (loops, loop);
|
||||
break;
|
||||
@ -133,7 +133,7 @@ unroll_and_peel_loops (struct loops *loops, int flags)
|
||||
check = false;
|
||||
break;
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
if (check)
|
||||
{
|
||||
@ -433,8 +433,6 @@ peel_loop_completely (struct loops *loops, struct loop *loop)
|
||||
|
||||
if (npeel)
|
||||
{
|
||||
int ok;
|
||||
|
||||
wont_exit = sbitmap_alloc (npeel + 1);
|
||||
sbitmap_ones (wont_exit);
|
||||
RESET_BIT (wont_exit, 0);
|
||||
@ -444,12 +442,11 @@ peel_loop_completely (struct loops *loops, struct loop *loop)
|
||||
remove_edges = xcalloc (npeel, sizeof (edge));
|
||||
n_remove_edges = 0;
|
||||
|
||||
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
loops, npeel,
|
||||
wont_exit, desc->out_edge,
|
||||
remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ);
|
||||
gcc_assert (ok);
|
||||
if (!duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
loops, npeel,
|
||||
wont_exit, desc->out_edge, remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ))
|
||||
abort ();
|
||||
|
||||
free (wont_exit);
|
||||
|
||||
@ -600,12 +597,11 @@ unroll_loop_constant_iterations (struct loops *loops, struct loop *loop)
|
||||
unsigned max_unroll = loop->lpt_decision.times;
|
||||
struct niter_desc *desc = get_simple_loop_desc (loop);
|
||||
bool exit_at_end = loop_exit_at_end_p (loop);
|
||||
int ok;
|
||||
|
||||
niter = desc->niter;
|
||||
|
||||
/* Should not assert out here (such loop should be peeled instead). */
|
||||
gcc_assert (niter > max_unroll + 1);
|
||||
if (niter <= max_unroll + 1)
|
||||
abort (); /* Should not get here (such loop should be peeled instead). */
|
||||
|
||||
exit_mod = niter % (max_unroll + 1);
|
||||
|
||||
@ -631,14 +627,12 @@ unroll_loop_constant_iterations (struct loops *loops, struct loop *loop)
|
||||
|
||||
if (exit_mod)
|
||||
{
|
||||
int ok;
|
||||
|
||||
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
if (!duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
loops, exit_mod,
|
||||
wont_exit, desc->out_edge,
|
||||
remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ);
|
||||
gcc_assert (ok);
|
||||
DLTHE_FLAG_UPDATE_FREQ))
|
||||
abort ();
|
||||
|
||||
desc->noloop_assumptions = NULL_RTX;
|
||||
desc->niter -= exit_mod;
|
||||
@ -661,18 +655,15 @@ unroll_loop_constant_iterations (struct loops *loops, struct loop *loop)
|
||||
if (exit_mod != max_unroll
|
||||
|| desc->noloop_assumptions)
|
||||
{
|
||||
int ok;
|
||||
|
||||
RESET_BIT (wont_exit, 0);
|
||||
if (desc->noloop_assumptions)
|
||||
RESET_BIT (wont_exit, 1);
|
||||
|
||||
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
loops, exit_mod + 1,
|
||||
wont_exit, desc->out_edge,
|
||||
remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ);
|
||||
gcc_assert (ok);
|
||||
if (!duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
loops, exit_mod + 1,
|
||||
wont_exit, desc->out_edge, remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ))
|
||||
abort ();
|
||||
|
||||
desc->niter -= exit_mod + 1;
|
||||
desc->niter_max -= exit_mod + 1;
|
||||
@ -686,12 +677,11 @@ unroll_loop_constant_iterations (struct loops *loops, struct loop *loop)
|
||||
}
|
||||
|
||||
/* Now unroll the loop. */
|
||||
ok = duplicate_loop_to_header_edge (loop, loop_latch_edge (loop),
|
||||
loops, max_unroll,
|
||||
wont_exit, desc->out_edge,
|
||||
remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ);
|
||||
gcc_assert (ok);
|
||||
if (!duplicate_loop_to_header_edge (loop, loop_latch_edge (loop),
|
||||
loops, max_unroll,
|
||||
wont_exit, desc->out_edge, remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ))
|
||||
abort ();
|
||||
|
||||
free (wont_exit);
|
||||
|
||||
@ -852,7 +842,6 @@ unroll_loop_runtime_iterations (struct loops *loops, struct loop *loop)
|
||||
unsigned max_unroll = loop->lpt_decision.times;
|
||||
struct niter_desc *desc = get_simple_loop_desc (loop);
|
||||
bool exit_at_end = loop_exit_at_end_p (loop);
|
||||
int ok;
|
||||
|
||||
/* Remember blocks whose dominators will have to be updated. */
|
||||
dom_bbs = xcalloc (n_basic_blocks, sizeof (basic_block));
|
||||
@ -927,12 +916,11 @@ unroll_loop_runtime_iterations (struct loops *loops, struct loop *loop)
|
||||
&& !desc->noloop_assumptions)
|
||||
SET_BIT (wont_exit, 1);
|
||||
ezc_swtch = loop_preheader_edge (loop)->src;
|
||||
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
loops, 1,
|
||||
wont_exit, desc->out_edge,
|
||||
remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ);
|
||||
gcc_assert (ok);
|
||||
if (!duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
loops, 1,
|
||||
wont_exit, desc->out_edge, remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ))
|
||||
abort ();
|
||||
|
||||
/* Record the place where switch will be built for preconditioning. */
|
||||
swtch = loop_split_edge_with (loop_preheader_edge (loop),
|
||||
@ -944,12 +932,11 @@ unroll_loop_runtime_iterations (struct loops *loops, struct loop *loop)
|
||||
sbitmap_zero (wont_exit);
|
||||
if (i != n_peel - 1 || !last_may_exit)
|
||||
SET_BIT (wont_exit, 1);
|
||||
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
loops, 1,
|
||||
wont_exit, desc->out_edge,
|
||||
remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ);
|
||||
gcc_assert (ok);
|
||||
if (!duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
loops, 1,
|
||||
wont_exit, desc->out_edge, remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ))
|
||||
abort ();
|
||||
|
||||
/* Create item for switch. */
|
||||
j = n_peel - i - (extra_zero_check ? 0 : 1);
|
||||
@ -992,12 +979,11 @@ unroll_loop_runtime_iterations (struct loops *loops, struct loop *loop)
|
||||
sbitmap_ones (wont_exit);
|
||||
RESET_BIT (wont_exit, may_exit_copy);
|
||||
|
||||
ok = duplicate_loop_to_header_edge (loop, loop_latch_edge (loop),
|
||||
loops, max_unroll,
|
||||
wont_exit, desc->out_edge,
|
||||
remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ);
|
||||
gcc_assert (ok);
|
||||
if (!duplicate_loop_to_header_edge (loop, loop_latch_edge (loop),
|
||||
loops, max_unroll,
|
||||
wont_exit, desc->out_edge, remove_edges, &n_remove_edges,
|
||||
DLTHE_FLAG_UPDATE_FREQ))
|
||||
abort ();
|
||||
|
||||
free (wont_exit);
|
||||
|
||||
@ -1027,7 +1013,8 @@ unroll_loop_runtime_iterations (struct loops *loops, struct loop *loop)
|
||||
preconditioning and the fact that the value must be valid at entry
|
||||
of the loop. After passing through the above code, we see that
|
||||
the correct new number of iterations is this: */
|
||||
gcc_assert (!desc->const_iter);
|
||||
if (desc->const_iter)
|
||||
abort ();
|
||||
desc->niter_expr =
|
||||
simplify_gen_binary (UDIV, desc->mode, old_niter, GEN_INT (max_unroll + 1));
|
||||
desc->niter_max /= max_unroll + 1;
|
||||
@ -1151,16 +1138,14 @@ peel_loop_simple (struct loops *loops, struct loop *loop)
|
||||
sbitmap wont_exit;
|
||||
unsigned npeel = loop->lpt_decision.times;
|
||||
struct niter_desc *desc = get_simple_loop_desc (loop);
|
||||
int ok;
|
||||
|
||||
wont_exit = sbitmap_alloc (npeel + 1);
|
||||
sbitmap_zero (wont_exit);
|
||||
|
||||
ok = duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
loops, npeel, wont_exit,
|
||||
NULL, NULL, NULL,
|
||||
DLTHE_FLAG_UPDATE_FREQ);
|
||||
gcc_assert (ok);
|
||||
if (!duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
|
||||
loops, npeel, wont_exit, NULL, NULL, NULL,
|
||||
DLTHE_FLAG_UPDATE_FREQ))
|
||||
abort ();
|
||||
|
||||
free (wont_exit);
|
||||
|
||||
@ -1286,16 +1271,14 @@ unroll_loop_stupid (struct loops *loops, struct loop *loop)
|
||||
sbitmap wont_exit;
|
||||
unsigned nunroll = loop->lpt_decision.times;
|
||||
struct niter_desc *desc = get_simple_loop_desc (loop);
|
||||
int ok;
|
||||
|
||||
wont_exit = sbitmap_alloc (nunroll + 1);
|
||||
sbitmap_zero (wont_exit);
|
||||
|
||||
ok = duplicate_loop_to_header_edge (loop, loop_latch_edge (loop),
|
||||
loops, nunroll, wont_exit,
|
||||
NULL, NULL, NULL,
|
||||
DLTHE_FLAG_UPDATE_FREQ);
|
||||
gcc_assert (ok);
|
||||
if (!duplicate_loop_to_header_edge (loop, loop_latch_edge (loop),
|
||||
loops, nunroll, wont_exit, NULL, NULL, NULL,
|
||||
DLTHE_FLAG_UPDATE_FREQ))
|
||||
abort ();
|
||||
|
||||
free (wont_exit);
|
||||
|
||||
|
@ -103,11 +103,13 @@ compare_and_jump_seq (rtx op0, rtx op1, enum rtx_code comp, rtx label, int prob,
|
||||
{
|
||||
/* A hack -- there seems to be no easy generic way how to make a
|
||||
conditional jump from a ccmode comparison. */
|
||||
gcc_assert (cinsn);
|
||||
if (!cinsn)
|
||||
abort ();
|
||||
cond = XEXP (SET_SRC (pc_set (cinsn)), 0);
|
||||
gcc_assert (GET_CODE (cond) == comp);
|
||||
gcc_assert (rtx_equal_p (op0, XEXP (cond, 0)));
|
||||
gcc_assert (rtx_equal_p (op1, XEXP (cond, 1)));
|
||||
if (GET_CODE (cond) != comp
|
||||
|| !rtx_equal_p (op0, XEXP (cond, 0))
|
||||
|| !rtx_equal_p (op1, XEXP (cond, 1)))
|
||||
abort ();
|
||||
emit_jump_insn (copy_insn (PATTERN (cinsn)));
|
||||
jump = get_last_insn ();
|
||||
JUMP_LABEL (jump) = JUMP_LABEL (cinsn);
|
||||
@ -116,7 +118,8 @@ compare_and_jump_seq (rtx op0, rtx op1, enum rtx_code comp, rtx label, int prob,
|
||||
}
|
||||
else
|
||||
{
|
||||
gcc_assert (!cinsn);
|
||||
if (cinsn)
|
||||
abort ();
|
||||
|
||||
op0 = force_operand (op0, NULL_RTX);
|
||||
op1 = force_operand (op1, NULL_RTX);
|
||||
@ -376,7 +379,8 @@ unswitch_single_loop (struct loops *loops, struct loop *loop,
|
||||
|
||||
/* Unswitch the loop on this condition. */
|
||||
nloop = unswitch_loop (loops, loop, bbs[i], cond, cinsn);
|
||||
gcc_assert (nloop);
|
||||
if (!nloop)
|
||||
abort ();
|
||||
|
||||
/* Invoke itself on modified loops. */
|
||||
unswitch_single_loop (loops, nloop, rconds, num + 1);
|
||||
@ -408,17 +412,19 @@ unswitch_loop (struct loops *loops, struct loop *loop, basic_block unswitch_on,
|
||||
rtx seq;
|
||||
|
||||
/* Some sanity checking. */
|
||||
gcc_assert (flow_bb_inside_loop_p (loop, unswitch_on));
|
||||
|
||||
gcc_assert (unswitch_on->succ);
|
||||
gcc_assert (unswitch_on->succ->succ_next);
|
||||
gcc_assert (!unswitch_on->succ->succ_next->succ_next);
|
||||
|
||||
gcc_assert (just_once_each_iteration_p (loop, unswitch_on));
|
||||
gcc_assert (!loop->inner);
|
||||
gcc_assert (flow_bb_inside_loop_p (loop, unswitch_on->succ->dest));
|
||||
gcc_assert (flow_bb_inside_loop_p (loop,
|
||||
unswitch_on->succ->succ_next->dest));
|
||||
if (!flow_bb_inside_loop_p (loop, unswitch_on))
|
||||
abort ();
|
||||
if (!unswitch_on->succ || !unswitch_on->succ->succ_next ||
|
||||
unswitch_on->succ->succ_next->succ_next)
|
||||
abort ();
|
||||
if (!just_once_each_iteration_p (loop, unswitch_on))
|
||||
abort ();
|
||||
if (loop->inner)
|
||||
abort ();
|
||||
if (!flow_bb_inside_loop_p (loop, unswitch_on->succ->dest))
|
||||
abort ();
|
||||
if (!flow_bb_inside_loop_p (loop, unswitch_on->succ->succ_next->dest))
|
||||
abort ();
|
||||
|
||||
entry = loop_preheader_edge (loop);
|
||||
|
||||
|
gcc/loop.c
@@ -77,7 +77,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
||||
#ifndef HAVE_prefetch
|
||||
#define HAVE_prefetch 0
|
||||
#define CODE_FOR_prefetch 0
|
||||
#define gen_prefetch(a,b,c) (gcc_unreachable(), NULL_RTX)
|
||||
#define gen_prefetch(a,b,c) (abort(), NULL_RTX)
|
||||
#endif
|
||||
|
||||
/* Give up the prefetch optimizations once we exceed a given threshold.
|
||||
@ -501,8 +501,8 @@ loop_optimize (rtx f, FILE *dumpfile, int flags)
|
||||
|
||||
/* See if we went too far. Note that get_max_uid already returns
|
||||
one more that the maximum uid of all insn. */
|
||||
gcc_assert (get_max_uid () <= max_uid_for_loop);
|
||||
|
||||
if (get_max_uid () > max_uid_for_loop)
|
||||
abort ();
|
||||
/* Now reset it to the actual size we need. See above. */
|
||||
max_uid_for_loop = get_max_uid ();
|
||||
|
||||
@ -1746,7 +1746,7 @@ rtx_equal_for_loop_p (rtx x, rtx y, struct loop_movables *movables,
|
||||
contain anything but integers and other rtx's,
|
||||
except for within LABEL_REFs and SYMBOL_REFs. */
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
}
|
||||
return 1;
|
||||
@ -1940,24 +1940,21 @@ move_movables (struct loop *loop, struct loop_movables *movables,
|
||||
|
||||
for (count = m->consec; count >= 0; count--)
|
||||
{
|
||||
if (!NOTE_P (p))
|
||||
{
|
||||
/* If this is the first insn of a library call
|
||||
sequence, something is very wrong. */
|
||||
gcc_assert (!find_reg_note (p, REG_LIBCALL,
|
||||
NULL_RTX));
|
||||
/* If this is the first insn of a library call sequence,
|
||||
something is very wrong. */
|
||||
if (!NOTE_P (p)
|
||||
&& (temp = find_reg_note (p, REG_LIBCALL, NULL_RTX)))
|
||||
abort ();
|
||||
|
||||
/* If this is the last insn of a libcall sequence,
|
||||
then delete every insn in the sequence except
|
||||
the last. The last insn is handled in the
|
||||
normal manner. */
|
||||
temp = find_reg_note (p, REG_RETVAL, NULL_RTX);
|
||||
if (temp)
|
||||
{
|
||||
temp = XEXP (temp, 0);
|
||||
while (temp != p)
|
||||
temp = delete_insn (temp);
|
||||
}
|
||||
/* If this is the last insn of a libcall sequence, then
|
||||
delete every insn in the sequence except the last.
|
||||
The last insn is handled in the normal manner. */
|
||||
if (!NOTE_P (p)
|
||||
&& (temp = find_reg_note (p, REG_RETVAL, NULL_RTX)))
|
||||
{
|
||||
temp = XEXP (temp, 0);
|
||||
while (temp != p)
|
||||
temp = delete_insn (temp);
|
||||
}
|
||||
|
||||
temp = p;
|
||||
@ -2121,7 +2118,8 @@ move_movables (struct loop *loop, struct loop_movables *movables,
|
||||
<< GET_MODE_BITSIZE (m->savemode)))
|
||||
- 1),
|
||||
reg, 1, OPTAB_LIB_WIDEN);
|
||||
gcc_assert (tem != 0);
|
||||
if (tem == 0)
|
||||
abort ();
|
||||
if (tem != reg)
|
||||
emit_move_insn (reg, tem);
|
||||
sequence = get_insns ();
|
||||
@ -2409,7 +2407,8 @@ replace_call_address (rtx x, rtx reg, rtx addr)
|
||||
case MEM:
|
||||
/* If this MEM uses a reg other than the one we expected,
|
||||
something is wrong. */
|
||||
gcc_assert (XEXP (x, 0) == reg);
|
||||
if (XEXP (x, 0) != reg)
|
||||
abort ();
|
||||
XEXP (x, 0) = addr;
|
||||
return;
|
||||
|
||||
@ -2724,7 +2723,8 @@ find_and_verify_loops (rtx f, struct loops *loops)
|
||||
break;
|
||||
|
||||
case NOTE_INSN_LOOP_END:
|
||||
gcc_assert (current_loop);
|
||||
if (! current_loop)
|
||||
abort ();
|
||||
|
||||
current_loop->end = insn;
|
||||
current_loop = current_loop->outer;
|
||||
@ -2912,7 +2912,6 @@ find_and_verify_loops (rtx f, struct loops *loops)
|
||||
if (invert_jump (p, new_label, 1))
|
||||
{
|
||||
rtx q, r;
|
||||
bool fail;
|
||||
|
||||
/* If no suitable BARRIER was found, create a suitable
|
||||
one before TARGET. Since TARGET is a fall through
|
||||
@ -2937,8 +2936,8 @@ find_and_verify_loops (rtx f, struct loops *loops)
|
||||
|
||||
/* Include the BARRIER after INSN and copy the
|
||||
block after LOC. */
|
||||
fail = squeeze_notes (&new_label, &last_insn_to_move);
|
||||
gcc_assert (!fail);
|
||||
if (squeeze_notes (&new_label, &last_insn_to_move))
|
||||
abort ();
|
||||
reorder_insns (new_label, last_insn_to_move, loc);
|
||||
|
||||
/* All those insns are now in TARGET_LOOP. */
|
||||
@ -2973,7 +2972,8 @@ find_and_verify_loops (rtx f, struct loops *loops)
|
||||
|
||||
/* If we didn't find it, then something is
|
||||
wrong. */
|
||||
gcc_assert (r);
|
||||
if (! r)
|
||||
abort ();
|
||||
}
|
||||
|
||||
/* P is now a jump outside the loop, so it must be put
|
||||
@ -3683,7 +3683,7 @@ rtx_equal_for_prefetch_p (rtx x, rtx y)
|
||||
contain anything but integers and other rtx's,
|
||||
except for within LABEL_REFs and SYMBOL_REFs. */
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
}
|
||||
return 1;
|
||||
@ -5721,14 +5721,19 @@ record_giv (const struct loop *loop, struct induction *v, rtx insn,
|
||||
/* Add the giv to the class of givs computed from one biv. */
|
||||
|
||||
bl = REG_IV_CLASS (ivs, REGNO (src_reg));
|
||||
gcc_assert (bl); /* Fatal error, biv missing for this giv? */
|
||||
v->next_iv = bl->giv;
|
||||
bl->giv = v;
|
||||
/* Don't count DEST_ADDR. This is supposed to count the number of
|
||||
insns that calculate givs. */
|
||||
if (type == DEST_REG)
|
||||
bl->giv_count++;
|
||||
bl->total_benefit += benefit;
|
||||
if (bl)
|
||||
{
|
||||
v->next_iv = bl->giv;
|
||||
bl->giv = v;
|
||||
/* Don't count DEST_ADDR. This is supposed to count the number of
|
||||
insns that calculate givs. */
|
||||
if (type == DEST_REG)
|
||||
bl->giv_count++;
|
||||
bl->total_benefit += benefit;
|
||||
}
|
||||
else
|
||||
/* Fatal error, biv missing for this giv? */
|
||||
abort ();
|
||||
|
||||
if (type == DEST_ADDR)
|
||||
{
|
||||
@ -6383,7 +6388,7 @@ general_induction_var (const struct loop *loop, rtx x, rtx *src_reg,
|
||||
break;
|
||||
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
|
||||
/* Remove any enclosing USE from ADD_VAL and MULT_VAL (there will be
|
||||
@ -6502,7 +6507,7 @@ simplify_giv_expr (const struct loop *loop, rtx x, rtx *ext_val, int *benefit)
|
||||
ext_val, benefit);
|
||||
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
|
||||
/* Each argument must be either REG, PLUS, or MULT. Convert REG to
|
||||
@ -6643,7 +6648,7 @@ simplify_giv_expr (const struct loop *loop, rtx x, rtx *ext_val, int *benefit)
|
||||
ext_val, benefit);
|
||||
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
|
||||
case ASHIFT:
|
||||
@ -7395,7 +7400,7 @@ check_ext_dependent_givs (const struct loop *loop, struct iv_class *bl)
|
||||
break;
|
||||
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
|
||||
if (ok)
|
||||
@ -10035,7 +10040,8 @@ try_copy_prop (const struct loop *loop, rtx replacement, unsigned int regno)
|
||||
&& REG_P (SET_DEST (set))
|
||||
&& REGNO (SET_DEST (set)) == regno)
|
||||
{
|
||||
gcc_assert (!init_insn);
|
||||
if (init_insn)
|
||||
abort ();
|
||||
|
||||
init_insn = insn;
|
||||
if (REGNO_FIRST_UID (regno) == INSN_UID (insn))
|
||||
@ -10068,7 +10074,8 @@ try_copy_prop (const struct loop *loop, rtx replacement, unsigned int regno)
|
||||
}
|
||||
}
|
||||
}
|
||||
gcc_assert (init_insn);
|
||||
if (! init_insn)
|
||||
abort ();
|
||||
if (apply_change_group ())
|
||||
{
|
||||
if (loop_dump_stream)
|
||||
@ -10569,7 +10576,7 @@ loop_giv_dump (const struct induction *v, FILE *file, int verbose)
|
||||
fprintf (file, " ext tr");
|
||||
break;
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -45,7 +45,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
   of an insn added during loop, since these don't have LUIDs.  */

#define INSN_LUID(INSN) \
  (gcc_assert (INSN_UID (INSN) < max_uid_for_loop), uid_luid[INSN_UID (INSN)])
  (INSN_UID (INSN) < max_uid_for_loop ? uid_luid[INSN_UID (INSN)] \
   : (abort (), -1))

#define REGNO_FIRST_LUID(REGNO) \
  (REGNO_FIRST_UID (REGNO) < max_uid_for_loop \
@ -546,7 +546,8 @@ normalize_sched_times (partial_schedule_ptr ps)
|
||||
ddg_node_ptr u = &g->nodes[i];
|
||||
int normalized_time = SCHED_TIME (u) - amount;
|
||||
|
||||
gcc_assert (normalized_time >= 0);
|
||||
if (normalized_time < 0)
|
||||
abort ();
|
||||
|
||||
SCHED_TIME (u) = normalized_time;
|
||||
SCHED_ROW (u) = normalized_time % ii;
|
||||
@ -713,8 +714,8 @@ generate_prolog_epilog (partial_schedule_ptr ps, rtx orig_loop_beg,
|
||||
label = XEXP (SET_SRC (cmp), 1);
|
||||
cond = XEXP (SET_SRC (cmp), 0);
|
||||
|
||||
gcc_assert (c_reg);
|
||||
gcc_assert (GET_CODE (cond) == NE);
|
||||
if (! c_reg || GET_CODE (cond) != NE)
|
||||
abort ();
|
||||
|
||||
XEXP (label, 0) = precond_exit_label;
|
||||
JUMP_LABEL (orig_loop_bct) = precond_exit_label_insn;
|
||||
@ -1024,8 +1025,8 @@ sms_schedule (FILE *dump_file)
|
||||
}
|
||||
|
||||
/* Make sure this is a doloop. */
|
||||
count_reg = doloop_register_get (tail, &comp);
|
||||
gcc_assert (count_reg);
|
||||
if ( !(count_reg = doloop_register_get (tail, &comp)))
|
||||
abort ();
|
||||
|
||||
/* This should be NULL_RTX if the count is unknown at compile time. */
|
||||
count_init = const_iteration_count (count_reg, pre_header, &loop_count);
|
||||
@ -1456,9 +1457,8 @@ check_nodes_order (int *node_order, int num_nodes)
|
||||
{
|
||||
int u = node_order[i];
|
||||
|
||||
gcc_assert (u < num_nodes);
|
||||
gcc_assert (u >= 0);
|
||||
gcc_assert (!TEST_BIT (tmp, u));
|
||||
if (u >= num_nodes || u < 0 || TEST_BIT (tmp, u))
|
||||
abort ();
|
||||
|
||||
SET_BIT (tmp, u);
|
||||
}
|
||||
|
gcc/optabs.c
@@ -119,7 +119,7 @@ static rtx expand_parity (enum machine_mode, rtx, rtx);
|
||||
|
||||
#ifndef HAVE_conditional_trap
|
||||
#define HAVE_conditional_trap 0
|
||||
#define gen_conditional_trap(a,b) (gcc_unreachable (), NULL_RTX)
|
||||
#define gen_conditional_trap(a,b) (abort (), NULL_RTX)
|
||||
#endif
|
||||
|
||||
/* Add a REG_EQUAL note to the last insn in INSNS. TARGET is being set to
|
||||
@ -138,9 +138,10 @@ add_equal_note (rtx insns, rtx target, enum rtx_code code, rtx op0, rtx op1)
|
||||
rtx last_insn, insn, set;
|
||||
rtx note;
|
||||
|
||||
gcc_assert (insns);
|
||||
gcc_assert (INSN_P (insns));
|
||||
gcc_assert (NEXT_INSN (insns) != NULL_RTX);
|
||||
if (! insns
|
||||
|| ! INSN_P (insns)
|
||||
|| NEXT_INSN (insns) == NULL_RTX)
|
||||
abort ();
|
||||
|
||||
if (GET_RTX_CLASS (code) != RTX_COMM_ARITH
|
||||
&& GET_RTX_CLASS (code) != RTX_BIN_ARITH
|
||||
@ -671,7 +672,8 @@ expand_simple_binop (enum machine_mode mode, enum rtx_code code, rtx op0,
|
||||
enum optab_methods methods)
|
||||
{
|
||||
optab binop = code_to_optab[(int) code];
|
||||
gcc_assert (binop != 0);
|
||||
if (binop == 0)
|
||||
abort ();
|
||||
|
||||
return expand_binop (mode, binop, op0, op1, target, unsignedp, methods);
|
||||
}
|
||||
@ -1710,8 +1712,9 @@ expand_twoval_unop (optab unoptab, rtx op0, rtx targ0, rtx targ1,
|
||||
|
||||
/* We could handle this, but we should always be called with a pseudo
|
||||
for our targets and all insns should take them as outputs. */
|
||||
gcc_assert ((*insn_data[icode].operand[0].predicate) (targ0, mode));
|
||||
gcc_assert ((*insn_data[icode].operand[1].predicate) (targ1, mode));
|
||||
if (! (*insn_data[icode].operand[0].predicate) (targ0, mode)
|
||||
|| ! (*insn_data[icode].operand[1].predicate) (targ1, mode))
|
||||
abort ();
|
||||
|
||||
pat = GEN_FCN (icode) (targ0, targ1, xop0);
|
||||
if (pat)
|
||||
@ -1838,8 +1841,9 @@ expand_twoval_binop (optab binoptab, rtx op0, rtx op1, rtx targ0, rtx targ1,
|
||||
|
||||
/* We could handle this, but we should always be called with a pseudo
|
||||
for our targets and all insns should take them as outputs. */
|
||||
gcc_assert ((*insn_data[icode].operand[0].predicate) (targ0, mode));
|
||||
gcc_assert ((*insn_data[icode].operand[3].predicate) (targ1, mode));
|
||||
if (! (*insn_data[icode].operand[0].predicate) (targ0, mode)
|
||||
|| ! (*insn_data[icode].operand[3].predicate) (targ1, mode))
|
||||
abort ();
|
||||
|
||||
pat = GEN_FCN (icode) (targ0, xop0, xop1, targ1);
|
||||
if (pat)
|
||||
@ -1902,7 +1906,8 @@ expand_twoval_binop_libfunc (optab binoptab, rtx op0, rtx op1,
|
||||
rtx insns;
|
||||
|
||||
/* Exactly one of TARG0 or TARG1 should be non-NULL. */
|
||||
gcc_assert ((targ0 != NULL_RTX) ^ (targ1 != NULL_RTX));
|
||||
if (!((targ0 != NULL_RTX) ^ (targ1 != NULL_RTX)))
|
||||
abort ();
|
||||
|
||||
mode = GET_MODE (op0);
|
||||
if (!binoptab->handlers[(int) mode].libfunc)
|
||||
@ -1939,7 +1944,8 @@ expand_simple_unop (enum machine_mode mode, enum rtx_code code, rtx op0,
|
||||
rtx target, int unsignedp)
|
||||
{
|
||||
optab unop = code_to_optab[(int) code];
|
||||
gcc_assert (unop != 0);
|
||||
if (unop == 0)
|
||||
abort ();
|
||||
|
||||
return expand_unop (mode, unop, op0, target, unsignedp);
|
||||
}
|
||||
@ -2614,7 +2620,8 @@ emit_no_conflict_block (rtx insns, rtx target, rtx op0, rtx op1, rtx equiv)
|
||||
}
|
||||
}
|
||||
|
||||
gcc_assert (set != 0);
|
||||
if (set == 0)
|
||||
abort ();
|
||||
|
||||
if (! reg_overlap_mentioned_p (target, SET_DEST (set)))
|
||||
{
|
||||
@ -2925,7 +2932,7 @@ prepare_cmp_insn (rtx *px, rtx *py, enum rtx_code *pcomparison, rtx size,
|
||||
|
||||
/* They could both be VOIDmode if both args are immediate constants,
|
||||
but we should fold that at an earlier stage.
|
||||
With no special code here, this will assert out,
|
||||
With no special code here, this will call abort,
|
||||
reminding the programmer to implement such folding. */
|
||||
|
||||
if (mode != BLKmode && flag_force_mem)
|
||||
@ -2954,10 +2961,11 @@ prepare_cmp_insn (rtx *px, rtx *py, enum rtx_code *pcomparison, rtx size,
|
||||
y = force_reg (mode, y);
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
/* Assert out if we have a non-canonical comparison. The RTL documentation
|
||||
/* Abort if we have a non-canonical comparison. The RTL documentation
|
||||
states that canonical comparisons are required only for targets which
|
||||
have cc0. */
|
||||
gcc_assert (!CONSTANT_P (x) || CONSTANT_P (y));
|
||||
if (CONSTANT_P (x) && ! CONSTANT_P (y))
|
||||
abort ();
|
||||
#endif
|
||||
|
||||
/* Don't let both operands fail to indicate the mode. */
|
||||
@ -2976,7 +2984,8 @@ prepare_cmp_insn (rtx *px, rtx *py, enum rtx_code *pcomparison, rtx size,
|
||||
rtx opalign
|
||||
= GEN_INT (MIN (MEM_ALIGN (x), MEM_ALIGN (y)) / BITS_PER_UNIT);
|
||||
|
||||
gcc_assert (size != 0);
|
||||
if (size == 0)
|
||||
abort ();
|
||||
|
||||
/* Try to use a memory block compare insn - either cmpstr
|
||||
or cmpmem will do. */
|
||||
@ -3073,8 +3082,11 @@ prepare_cmp_insn (rtx *px, rtx *py, enum rtx_code *pcomparison, rtx size,
|
||||
return;
|
||||
}
|
||||
|
||||
gcc_assert (class == MODE_FLOAT);
|
||||
prepare_float_lib_cmp (px, py, pcomparison, pmode, punsignedp);
|
||||
if (class == MODE_FLOAT)
|
||||
prepare_float_lib_cmp (px, py, pcomparison, pmode, punsignedp);
|
||||
|
||||
else
|
||||
abort ();
|
||||
}
|
||||
|
||||
/* Before emitting an insn with code ICODE, make sure that X, which is going
|
||||
@ -3114,7 +3126,7 @@ emit_cmp_and_jump_insn_1 (rtx x, rtx y, enum machine_mode mode,
|
||||
enum machine_mode wider_mode = mode;
|
||||
|
||||
/* Try combined insns first. */
|
||||
for (;;)
|
||||
do
|
||||
{
|
||||
enum insn_code icode;
|
||||
PUT_MODE (test, wider_mode);
|
||||
@ -3157,12 +3169,15 @@ emit_cmp_and_jump_insn_1 (rtx x, rtx y, enum machine_mode mode,
|
||||
return;
|
||||
}
|
||||
|
||||
gcc_assert (class == MODE_INT || class == MODE_FLOAT
|
||||
|| class == MODE_COMPLEX_FLOAT);
|
||||
if (class != MODE_INT && class != MODE_FLOAT
|
||||
&& class != MODE_COMPLEX_FLOAT)
|
||||
break;
|
||||
|
||||
wider_mode = GET_MODE_WIDER_MODE (wider_mode);
|
||||
gcc_assert (wider_mode != VOIDmode);
|
||||
}
|
||||
while (wider_mode != VOIDmode);
|
||||
|
||||
abort ();
|
||||
}
|
||||
|
||||
/* Generate code to compare X with Y so that the condition codes are
|
||||
@ -3193,7 +3208,8 @@ emit_cmp_and_jump_insns (rtx x, rtx y, enum rtx_code comparison, rtx size,
|
||||
{
|
||||
/* If we're not emitting a branch, this means some caller
|
||||
is out of sync. */
|
||||
gcc_assert (label);
|
||||
if (! label)
|
||||
abort ();
|
||||
|
||||
op0 = y, op1 = x;
|
||||
comparison = swap_condition (comparison);
|
||||
@ -3264,7 +3280,8 @@ prepare_float_lib_cmp (rtx *px, rtx *py, enum rtx_code *pcomparison,
|
||||
}
|
||||
}
|
||||
|
||||
gcc_assert (mode != VOIDmode);
|
||||
if (mode == VOIDmode)
|
||||
abort ();
|
||||
|
||||
if (mode != orig_mode)
|
||||
{
|
||||
@ -3322,7 +3339,7 @@ prepare_float_lib_cmp (rtx *px, rtx *py, enum rtx_code *pcomparison,
|
||||
break;
|
||||
|
||||
default:
|
||||
gcc_unreachable ();
|
||||
abort ();
|
||||
}
|
||||
equiv = simplify_gen_ternary (IF_THEN_ELSE, word_mode, word_mode,
|
||||
equiv, true_rtx, false_rtx);
|
||||
@ -3624,12 +3641,13 @@ gen_add2_insn (rtx x, rtx y)
|
||||
{
|
||||
int icode = (int) add_optab->handlers[(int) GET_MODE (x)].insn_code;
|
||||
|
||||
gcc_assert ((*insn_data[icode].operand[0].predicate)
|
||||
(x, insn_data[icode].operand[0].mode));
|
||||
gcc_assert ((*insn_data[icode].operand[1].predicate)
|
||||
(x, insn_data[icode].operand[1].mode));
|
||||
gcc_assert ((*insn_data[icode].operand[2].predicate)
|
||||
(y, insn_data[icode].operand[2].mode));
|
||||
if (! ((*insn_data[icode].operand[0].predicate)
|
||||
(x, insn_data[icode].operand[0].mode))
|
||||
|| ! ((*insn_data[icode].operand[1].predicate)
|
||||
(x, insn_data[icode].operand[1].mode))
|
||||
|| ! ((*insn_data[icode].operand[2].predicate)
|
||||
(y, insn_data[icode].operand[2].mode)))
|
||||
abort ();
|
||||
|
||||
return (GEN_FCN (icode) (x, x, y));
|
||||
}
|
||||
@ -3658,7 +3676,8 @@ have_add2_insn (rtx x, rtx y)
|
||||
{
|
||||
int icode;
|
||||
|
||||
gcc_assert (GET_MODE (x) != VOIDmode);
|
||||
if (GET_MODE (x) == VOIDmode)
|
||||
abort ();
|
||||
|
||||
icode = (int) add_optab->handlers[(int) GET_MODE (x)].insn_code;
|
||||
|
||||
@ -3683,12 +3702,13 @@ gen_sub2_insn (rtx x, rtx y)
|
||||
{
|
||||
int icode = (int) sub_optab->handlers[(int) GET_MODE (x)].insn_code;
|
||||
|
||||
gcc_assert ((*insn_data[icode].operand[0].predicate)
|
||||
(x, insn_data[icode].operand[0].mode));
|
||||
gcc_assert ((*insn_data[icode].operand[1].predicate)
|
||||
(x, insn_data[icode].operand[1].mode));
|
||||
gcc_assert ((*insn_data[icode].operand[2].predicate)
|
||||
(y, insn_data[icode].operand[2].mode));
|
||||
if (! ((*insn_data[icode].operand[0].predicate)
|
||||
(x, insn_data[icode].operand[0].mode))
|
||||
|| ! ((*insn_data[icode].operand[1].predicate)
|
||||
(x, insn_data[icode].operand[1].mode))
|
||||
|| ! ((*insn_data[icode].operand[2].predicate)
|
||||
(y, insn_data[icode].operand[2].mode)))
|
||||
abort ();
|
||||
|
||||
return (GEN_FCN (icode) (x, x, y));
|
||||
}
|
||||
@ -3717,7 +3737,8 @@ have_sub2_insn (rtx x, rtx y)
|
||||
{
|
||||
int icode;
|
||||
|
||||
gcc_assert (GET_MODE (x) != VOIDmode);
|
||||
if (GET_MODE (x) == VOIDmode)
|
||||
abort ();
|
||||
|
||||
icode = (int) sub_optab->handlers[(int) GET_MODE (x)].insn_code;
|
||||
|
||||
@ -3843,7 +3864,8 @@ expand_float (rtx to, rtx from, int unsignedp)
|
||||
enum machine_mode fmode, imode;
|
||||
|
||||
/* Crash now, because we won't be able to decide which mode to use. */
|
||||
gcc_assert (GET_MODE (from) != VOIDmode);
|
||||
if (GET_MODE (from) == VOIDmode)
|
||||
abort ();
|
||||
|
||||
/* Look for an insn to do the conversion. Do it in the specified
|
||||
modes if possible; otherwise convert either input, output or both to
|
||||
@ -4004,7 +4026,8 @@ expand_float (rtx to, rtx from, int unsignedp)
|
||||
from = force_not_mem (from);
|
||||
|
||||
libfunc = tab->handlers[GET_MODE (to)][GET_MODE (from)].libfunc;
|
||||
gcc_assert (libfunc);
|
||||
if (!libfunc)
|
||||
abort ();
|
||||
|
||||
start_sequence ();
|
||||
|
||||
@ -4187,7 +4210,8 @@ expand_fix (rtx to, rtx from, int unsignedp)
|
||||
|
||||
convert_optab tab = unsignedp ? ufix_optab : sfix_optab;
|
||||
libfunc = tab->handlers[GET_MODE (to)][GET_MODE (from)].libfunc;
|
||||
gcc_assert (libfunc);
|
||||
if (!libfunc)
|
||||
abort ();
|
||||
|
||||
if (flag_force_mem)
|
||||
from = force_not_mem (from);
|
||||
@ -4800,7 +4824,8 @@ debug_optab_libfuncs (void)
|
||||
h = &o->handlers[j];
|
||||
if (h->libfunc)
|
||||
{
|
||||
gcc_assert (GET_CODE (h->libfunc) == SYMBOL_REF);
|
||||
if (GET_CODE (h->libfunc) != SYMBOL_REF)
|
||||
abort ();
|
||||
fprintf (stderr, "%s\t%s:\t%s\n",
|
||||
GET_RTX_NAME (o->code),
|
||||
GET_MODE_NAME (j),
|
||||
@ -4820,7 +4845,8 @@ debug_optab_libfuncs (void)
|
||||
h = &o->handlers[j][k];
|
||||
if (h->libfunc)
|
||||
{
|
||||
gcc_assert (GET_CODE (h->libfunc) == SYMBOL_REF);
|
||||
if (GET_CODE (h->libfunc) != SYMBOL_REF)
|
||||
abort ();
|
||||
fprintf (stderr, "%s\t%s\t%s:\t%s\n",
|
||||
GET_RTX_NAME (o->code),
|
||||
GET_MODE_NAME (j),
|
||||
|
@@ -1003,8 +1003,10 @@ common_handle_option (size_t scode, const char *arg, int value)
    default:
      /* If the flag was handled in a standard way, assume the lack of
	 processing here is intentional.  */
      gcc_assert (cl_options[scode].flag_var);
      break;
      if (cl_options[scode].flag_var)
	break;

      abort ();
    }

  return 1;
@@ -61,7 +61,8 @@ set_param_value (const char *name, int value)
  size_t i;

  /* Make sure nobody tries to set a parameter to an invalid value.  */
  gcc_assert (value != INVALID_PARAM_VAL);
  if (value == INVALID_PARAM_VAL)
    abort ();

  /* Scan the parameter table to find a matching entry.  */
  for (i = 0; i < num_compiler_params; ++i)
10
gcc/passes.c
10
gcc/passes.c
@@ -121,8 +121,8 @@ open_dump_file (enum tree_dump_index index, tree decl)
 
   timevar_push (TV_DUMP);
 
-  gcc_assert (!dump_file);
-  gcc_assert (!dump_file_name);
+  if (dump_file != NULL || dump_file_name != NULL)
+    abort ();
 
   dump_file_name = get_dump_file_name (index);
   initializing_dump = !dump_initialized_p (index);
@@ -289,9 +289,11 @@ rest_of_handle_final (void)
      different from the DECL_NAME name used in the source file. */
 
   x = DECL_RTL (current_function_decl);
-  gcc_assert (MEM_P (x));
+  if (!MEM_P (x))
+    abort ();
   x = XEXP (x, 0);
-  gcc_assert (GET_CODE (x) == SYMBOL_REF);
+  if (GET_CODE (x) != SYMBOL_REF)
+    abort ();
   fnname = XSTR (x, 0);
 
   assemble_start_function (current_function_decl, fnname);
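A pattern worth noting in the passes.c hunks: the assertion form states each invariant separately, so a checking failure names exactly which condition broke, while the restored code folds both tests into one branch before a single abort. A small sketch of the trade-off, using the standard <assert.h> assert and a made-up mem_ref structure rather than GCC's rtl accessors:

#include <assert.h>
#include <stdlib.h>

/* Made-up structure standing in for an rtl expression.  */
struct mem_ref { int is_mem; int is_symbol; };

/* Restored style: one combined test, one anonymous failure point.  */
static void
check_combined (const struct mem_ref *x)
{
  if (!x->is_mem || !x->is_symbol)
    abort ();
}

/* Reverted style: separate assertions, so a failure report points at
   the exact invariant that was violated.  */
static void
check_split (const struct mem_ref *x)
{
  assert (x->is_mem);
  assert (x->is_symbol);
}

int
main (void)
{
  struct mem_ref r = { 1, 1 };
  check_combined (&r);
  check_split (&r);
  return 0;
}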
@@ -305,7 +305,9 @@ expr_equiv_p (const void *exp1p, const void *exp2p)
   struct expr *exp1 = (struct expr *) exp1p;
   struct expr *exp2 = (struct expr *) exp2p;
   int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);
-  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
+  if (equiv_p
+      && exp1->hash != exp2->hash)
+    abort ();
   return equiv_p;
 }
 
@@ -483,8 +485,11 @@ oprs_unchanged_p (rtx x, rtx insn, bool after_insn)
   switch (code)
     {
     case REG:
+#ifdef ENABLE_CHECKING
       /* We are called after register allocation. */
-      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
+      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
+        abort ();
+#endif
       if (after_insn)
         /* If the last CUID setting the insn is less than the CUID of
            INSN, then reg X is not changed in or after INSN. */
@@ -780,7 +785,8 @@ hash_scan_set (rtx insn)
 
 #ifdef ENABLE_CHEKCING
   /* We shouldn't have any EH_REGION notes post reload. */
-  gcc_assert (!find_reg_note (insn, REG_EH_REGION, NULL_RTX));
+  if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
+    abort ();
 #endif
 
   if (REG_P (dest))
@@ -928,9 +934,11 @@ reg_set_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
   rtx insn;
   int regno;
 
+#ifdef ENABLE_CHECKING
   /* We are called after register allocation. */
-  gcc_assert (REG_P (reg));
-  gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
+  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
+    abort ();
+#endif
 
   if (from_insn == to_insn)
     return NULL_RTX;
@@ -965,9 +973,11 @@ reg_used_between_after_reload_p (rtx reg, rtx from_insn, rtx to_insn)
   rtx insn;
   int regno;
 
+#ifdef ENABLE_CHECKING
   /* We are called after register allocation. */
-  gcc_assert (REG_P (reg));
-  gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
+  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
+    abort ();
+#endif
 
   if (from_insn == to_insn)
     return NULL_RTX;
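In these postreload-gcse.c hunks the restored register checks sit back under #ifdef ENABLE_CHECKING, so they are compiled only in checking builds. A compressed sketch of that guard pattern; MY_CHECKING and FIRST_PSEUDO_REG are illustrative stand-ins for the configure-controlled ENABLE_CHECKING macro and GCC's real register limit:

#include <stdlib.h>

#define MY_CHECKING 1        /* stand-in for configure's --enable-checking */
#define FIRST_PSEUDO_REG 64  /* illustrative value, not GCC's */

static void
note_hard_reg_use (int regno)
{
#if MY_CHECKING
  /* Post-reload, only hard registers may appear; verify that in
     checking builds and stay silent otherwise.  */
  if (regno >= FIRST_PSEUDO_REG)
    abort ();
#endif
  (void) regno;  /* real bookkeeping would happen here */
}

int
main (void)
{
  note_hard_reg_use (3);
  return 0;
}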
@@ -1017,7 +1027,7 @@ get_avail_load_store_reg (rtx insn)
     return SET_DEST(PATTERN(insn));
   if (REG_P (SET_SRC (PATTERN (insn)))) /* A store. */
     return SET_SRC (PATTERN (insn));
-  gcc_unreachable ();
+  abort ();
 }
 
 /* Return nonzero if the predecessors of BB are "well behaved". */
@@ -1099,8 +1109,8 @@ eliminate_partially_redundant_load (basic_block bb, rtx insn,
         {
           /* Check if the loaded register is not used. */
           avail_insn = a_occr->insn;
-          avail_reg = get_avail_load_store_reg (avail_insn);
-          gcc_assert (avail_reg);
+          if (! (avail_reg = get_avail_load_store_reg (avail_insn)))
+            abort ();
           /* Make sure we can generate a move from register avail_reg to
              dest. */
           extract_insn (gen_move_insn (copy_rtx (dest),
@@ -1171,7 +1181,8 @@ eliminate_partially_redundant_load (basic_block bb, rtx insn,
       /* Set avail_reg to be the register having the value of the
          memory. */
       avail_reg = get_avail_load_store_reg (avail_insn);
-      gcc_assert (avail_reg);
+      if (! avail_reg)
+        abort ();
 
       insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
                                           copy_rtx (avail_reg)),
@@ -280,7 +280,7 @@ reload_cse_simplify_set (rtx set, rtx insn)
           if (this_val == trunc_int_for_mode (this_val, GET_MODE (src)))
             break;
         default:
-          gcc_unreachable ();
+          abort ();
         }
       this_rtx = GEN_INT (this_val);
     }
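Where the guarded condition is simply "control never reaches here", typically a switch default, the reverted code used gcc_unreachable () and the restored code a bare abort (). A minimal sketch of the pattern; unreachable_stub is a hypothetical stand-in, not GCC's macro, which additionally reports file and line through fancy_abort:

#include <stdlib.h>

/* Hypothetical stand-in for gcc_unreachable ().  */
#define unreachable_stub() abort ()

enum kind { KIND_A, KIND_B };

static int
classify (enum kind k)
{
  switch (k)
    {
    case KIND_A:
      return 1;
    case KIND_B:
      return 2;
    default:
      /* The pre-conversion code spelled this as a plain abort ().  */
      unreachable_stub ();
    }
}

int
main (void)
{
  return classify (KIND_B) == 2 ? 0 : 1;
}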
@@ -1057,7 +1057,8 @@ reload_combine_note_use (rtx *xp, rtx insn)
       if (REG_P (SET_DEST (x)))
         {
           /* No spurious CLOBBERs of pseudo registers may remain. */
-          gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
+          if (REGNO (SET_DEST (x)) >= FIRST_PSEUDO_REGISTER)
+            abort ();
           return;
         }
       break;
@@ -1077,7 +1078,8 @@ reload_combine_note_use (rtx *xp, rtx insn)
       int nregs;
 
       /* No spurious USEs of pseudo registers may remain. */
-      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
+      if (regno >= FIRST_PSEUDO_REGISTER)
+        abort ();
 
       nregs = hard_regno_nregs[regno][GET_MODE (x)];
 
@@ -179,7 +179,8 @@ tree_predicted_by_p (basic_block bb, enum br_predictor predictor)
 void
 predict_insn (rtx insn, enum br_predictor predictor, int probability)
 {
-  gcc_assert (any_condjump_p (insn));
+  if (!any_condjump_p (insn))
+    abort ();
   if (!flag_guess_branch_prob)
     return;
 
@@ -1105,7 +1106,8 @@ expected_value_to_br_prob (void)
       cond = simplify_rtx (cond);
 
       /* Turn the condition into a scaled branch probability. */
-      gcc_assert (cond == const_true_rtx || cond == const0_rtx);
+      if (cond != const_true_rtx && cond != const0_rtx)
+        abort ();
       predict_insn_def (insn, PRED_BUILTIN_EXPECT,
                         cond == const_true_rtx ? TAKEN : NOT_TAKEN);
     }
@@ -1206,8 +1208,8 @@ propagate_freq (struct loop *loop)
     {
 #ifdef ENABLE_CHECKING
       for (e = bb->pred; e; e = e->pred_next)
-        gcc_assert (!BLOCK_INFO (e->src)->tovisit
-                    || (e->flags & EDGE_DFS_BACK));
+        if (BLOCK_INFO (e->src)->tovisit && !(e->flags & EDGE_DFS_BACK))
+          abort ();
 #endif
 
       for (e = bb->pred; e; e = e->pred_next)
@@ -1351,7 +1353,8 @@ expensive_function_p (int threshold)
 
   /* We can not compute accurately for large thresholds due to scaled
      frequencies. */
-  gcc_assert (threshold < BB_FREQ_MAX);
+  if (threshold > BB_FREQ_MAX)
+    abort ();
 
   /* Frequencies are out of range. This either means that function contains
      internal loop executing more than BB_FREQ_MAX times or profile feedback
@@ -231,7 +231,8 @@ pp_base_format_text (pretty_printer *pp, text_info *text)
             break;
           }
         /* We don't support precision beyond that of "long long". */
-        gcc_assert (precision <= 2);
+        if (precision > 2)
+          abort();
 
         if (quoted)
           pp_string (pp, open_quote);
@@ -318,10 +319,10 @@ pp_base_format_text (pretty_printer *pp, text_info *text)
           int n;
           const char *s;
           /* We handle no precision specifier but '%.*s'. */
-          ++text->format_spec;
-          gcc_assert (*text->format_spec == '*');
-          ++text->format_spec;
-          gcc_assert (*text->format_spec == 's');
+          if (*++text->format_spec != '*')
+            abort ();
+          else if (*++text->format_spec != 's')
+            abort ();
           n = va_arg (*text->args_ptr, int);
           s = va_arg (*text->args_ptr, const char *);
           pp_append_text (pp, s, s + n);
@@ -329,16 +330,14 @@ pp_base_format_text (pretty_printer *pp, text_info *text)
           break;
 
         default:
-          {
-            bool ok;
-
-            /* Make sure there's a format translator. */
-            gcc_assert (pp_format_decoder (pp));
-            ok = pp_format_decoder (pp) (pp, text);
-            /* and make sure it recognized the format. */
-            gcc_assert (ok);
-            break;
-          }
+          if (!pp_format_decoder (pp) || !(*pp_format_decoder (pp)) (pp, text))
+            {
+              /* Hmmm.  The client failed to install a format translator
+                 but called us with an unrecognized format.  Or, maybe, the
+                 translated string just contains an invalid format, or
+                 has formats in the wrong order.  Sorry. */
+              abort ();
+            }
         }
       if (quoted)
         pp_string (pp, close_quote);
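The last pretty-print.c hunk is the one place where the conversion was more than mechanical: the decoder call performs the actual formatting, so the reverted code stored its result in a temporary ok and asserted that, rather than wrapping the call itself in the assertion. Keeping side effects out of assertion arguments matters because assert-style macros can compile to nothing in some configurations. A small sketch with the standard <assert.h> assert and a made-up decode callback, not GCC's pretty-printer hook:

#include <assert.h>
#include <stdbool.h>
#include <stdio.h>

/* A formatting callback with a side effect, loosely modeled on a
   pretty-printer format decoder hook.  */
static bool
decode (const char *spec)
{
  printf ("decoded: %s\n", spec);   /* the side effect we must not lose */
  return spec[0] == '%';
}

int
main (void)
{
  /* Wrong under -DNDEBUG: the whole call disappears with the assert.  */
  /* assert (decode ("%d")); */

  /* Safe: do the work unconditionally, assert only the result.  */
  bool ok = decode ("%d");
  assert (ok);
  (void) ok;

  return 0;
}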
@@ -149,7 +149,8 @@ instrument_edges (struct edge_list *el)
 
           if (!inf->ignore && !inf->on_tree)
             {
-              gcc_assert (!(e->flags & EDGE_ABNORMAL));
+              if (e->flags & EDGE_ABNORMAL)
+                abort ();
               if (dump_file)
                 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
                          e->src->index, e->dest->index,
@@ -195,7 +196,7 @@ instrument_values (histogram_values values)
           break;
 
         default:
-          gcc_unreachable ();
+          abort ();
         }
       if (!coverage_counter_alloc (t, hist->n_counters))
         continue;
@@ -219,7 +220,7 @@ instrument_values (histogram_values values)
           break;
 
         default:
-          gcc_unreachable ();
+          abort ();
         }
     }
 }
@@ -419,7 +420,8 @@ compute_branch_probabilities (void)
           /* Calculate count for remaining edge by conservation. */
           total = bb->count - total;
 
-          gcc_assert (e);
+          if (! e)
+            abort ();
           EDGE_INFO (e)->count_valid = 1;
           e->count = total;
           bi->succ_count--;
@@ -445,7 +447,8 @@ compute_branch_probabilities (void)
           /* Calculate count for remaining edge by conservation. */
           total = bb->count - total + e->count;
 
-          gcc_assert (e);
+          if (! e)
+            abort ();
           EDGE_INFO (e)->count_valid = 1;
           e->count = total;
           bi->pred_count--;
@@ -463,18 +466,17 @@ compute_branch_probabilities (void)
   if (dump_file)
     fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
 
 #ifdef ENABLE_CHECKING
   /* If the graph has been correctly solved, every block will have a
      succ and pred count of zero. */
   FOR_EACH_BB (bb)
     {
-      gcc_assert (!BB_INFO (bb)->succ_count);
-      gcc_assert (!BB_INFO (bb)->pred_count);
+      if (BB_INFO (bb)->succ_count || BB_INFO (bb)->pred_count)
+        abort ();
     }
 #endif
-
 
   /* For every edge, calculate its branch probability and add a reg_note
      to the branch insn to indicate this. */
 
   for (i = 0; i < 20; i++)
     hist_br_prob[i] = 0;
   num_never_executed = 0;
@@ -982,7 +984,8 @@ branch_prob (void)
     {
       unsigned n_instrumented = instrument_edges (el);
 
-      gcc_assert (n_instrumented == num_instrumented);
+      if (n_instrumented != num_instrumented)
+        abort ();
 
       if (flag_profile_values)
         instrument_values (values);
@@ -1041,7 +1044,8 @@ union_groups (basic_block bb1, basic_block bb2)
 
   /* ??? I don't have a place for the rank field.  OK.  Lets go w/o it,
      this code is unlikely going to be performance problem anyway. */
-  gcc_assert (bb1g != bb2g);
+  if (bb1g == bb2g)
+    abort ();
 
   bb1g->aux = bb2g;
 }
@@ -1186,7 +1190,8 @@ void
 tree_register_profile_hooks (void)
 {
   profile_hooks = &tree_profile_hooks;
-  gcc_assert (ir_type ());
+  if (!ir_type ())
+    abort ();
 }
 
 /* Set up hooks to enable RTL-based profiling. */
@@ -1195,5 +1200,6 @@ void
 rtl_register_profile_hooks (void)
 {
   profile_hooks = &rtl_profile_hooks;
-  gcc_assert (!ir_type ());
+  if (ir_type ())
+    abort ();
 }