[PATCH 7/9] ENABLE_CHECKING refactoring: middle-end, LTO FE

gcc/lto/ChangeLog:
2015-10-27  Mikhail Maltsev  <maltsevm@gmail.com>

	* lto.c (unify_scc): Use flag_checking and remove ENABLE_CHECKING
	conditionals.
	(lto_fixup_state): Likewise.
	(do_whole_program_analysis): Use
	symtab_node::checking_verify_symtab_nodes and remove ENABLE_CHECKING
	conditionals.

gcc/ChangeLog:

2015-10-27  Mikhail Maltsev  <maltsevm@gmail.com>

	* attribs.c (check_attribute_tables): New function, broken out from...
	(init_attributes): Use it.
	* cfgcleanup.c (try_optimize_cfg): Use flag_checking, CHECKING_P,
	gcc_checking_assert and checking_* functions to eliminate
	ENABLE_CHECKING conditionals.
	* cfgexpand.c (expand_goto, expand_debug_expr): Likewise.
	(pass_expand::execute): Likewise.
	* cgraphclones.c (symbol_table::materialize_all_clones): Likewise.
	* cgraphunit.c (mark_functions_to_output): Likewise.
	(cgraph_node::expand_thunk): Likewise.
	(symbol_table::compile): Likewise.
	* ddg.c (add_cross_iteration_register_deps): Likewise.
	(create_ddg_all_sccs): Likewise.
	* df-core.c (df_finish_pass, df_analyze): Likewise.
	* diagnostic-core.h: Likewise.
	* diagnostic.c (diagnostic_report_diagnostic): Likewise.
	* dominance.c (calculate_dominance_info): Likewise.
	* dwarf2out.c (add_AT_die_ref): Likewise.
	(const_ok_for_output_1, mem_loc_descriptor): Likewise.
	(loc_list_from_tree, gen_lexical_block_die): Likewise.
	(gen_type_die_with_usage, gen_type_die): Likewise.
	(dwarf2out_decl): Likewise.
	* emit-rtl.c (verify_rtx_sharing, reorder_insns_nobb): Likewise.
	* except.c (duplicate_eh_regions): Likewise.
	* fwprop.c (register_active_defs, update_df_init): Likewise.
	(fwprop_init, fwprop_done): Likewise.
	(update_uses): Likewise.
	* ggc-page.c (ggc_grow): Likewise.
	* gimplify.c (gimplify_body): Likewise.
	(gimplify_hasher::equal): Likewise.
	* graphite-isl-ast-to-gimple.c (graphite_verify): Likewise.
	* graphite-scop-detection.c (canonicalize_loop_closed_ssa_form):
	Likewise.
	* graphite-sese-to-poly.c (rewrite_reductions_out_of_ssa): Likewise.
	(rewrite_cross_bb_scalar_deps_out_of_ssa): Likewise.
	* hash-table.h (::find_empty_slot_for_expand): Likewise.
	* ifcvt.c (if_convert): Likewise.
	* ipa-cp.c (ipcp_propagate_stage): Likewise.
	* ipa-devirt.c (type_in_anonymous_namespace_p): Likewise.
	(odr_type_p, odr_types_equivalent_p): Likewise.
	(add_type_duplicate, get_odr_type): Likewise.
	* ipa-icf.c (sem_item_optimizer::execute): Likewise.
	(sem_item_optimizer::subdivide_classes_by_equality): Likewise.
	(sem_item_optimizer::verify_classes): Likewise.
	(sem_item_optimizer::traverse_congruence_split): Likewise.
	(sem_item_optimizer::checking_verify_classes): New.
	* ipa-icf.h (sem_item_optimizer::checking_verify_classes): Add new
	method.
	* cfgrtl.c (commit_edge_insertions): Likewise.
	(fixup_reorder_chain, cfg_layout_finalize): Likewise.
	(rtl_flow_call_edges_add): Likewise.
	* cgraph.c (symbol_table::create_edge): Likewise.
	(cgraph_edge::redirect_call_stmt_to_callee): Likewise.
	* cgraph.h (symtab_node): Likewise.
	(symtab_node::checking_verify_symtab_nodes): Define.
	(cgraph_node::checking_verify_cgraph_nodes): Define.
	* cfghooks.h (checking_verify_flow_info): Define.
	* cfgloop.h (checking_verify_loop_structure): Define.
	* dominance.h (checking_verify_dominators): Define.
	* et-forest.c: Fix comment.
	* ipa-inline-analysis.c (compute_inline_parameters): Use flag_checking,
	CHECKING_P, gcc_checking_assert and checking_* functions to eliminate
	ENABLE_CHECKING conditionals.
	* ipa-inline-transform.c (save_inline_function_body): Likewise.
	* ipa-inline.c (inline_small_functions): Likewise.
	(early_inliner): Likewise.
	* ipa-inline.h (estimate_edge_growth): Likewise.
	* ipa-visibility.c (function_and_variable_visibility): Likewise.
	* ipa.c (symbol_table::remove_unreachable_nodes): Likewise.
	(ipa_single_use): Likewise.
	* ira-int.h: Likewise.
	* ira.c (ira): Likewise.
	* loop-doloop.c (doloop_optimize_loops): Likewise.
	* loop-init.c (loop_optimizer_init, fix_loop_structure): Likewise.
	* loop-invariant.c (move_loop_invariants): Likewise.
	* lra-assigns.c (lra_assign): Likewise.
	* lra-constraints.c (lra_constraints): Likewise.
	* lra-eliminations.c (lra_eliminate): Likewise.
	* lra-int.h (struct lra_reg): Likewise.
	* lra-lives.c (check_pseudos_live_through_calls): Likewise.
	(lra_create_live_ranges_1): Likewise.
	* lra-remat.c (create_remat_bb_data): Likewise.
	* lra.c (lra_update_insn_recog_data, restore_scratches): Likewise.
	(lra): Likewise.
	(check_rtl): Always define. Remove incorrect guard around
	extract_constrain_insn call.
	* lto-cgraph.c (input_cgraph_1): Use flag_checking,
	CHECKING_P, gcc_checking_assert and checking_* functions to eliminate
	ENABLE_CHECKING conditionals.
	* lto-streamer-out.c (DFS::DFS): Likewise.
	(lto_output): Likewise.
	* lto-streamer.c (lto_streamer_init): Likewise.
	* omp-low.c (scan_omp_target, expand_omp_taskreg): Likewise.
	(expand_omp_target, execute_expand_omp): Likewise.
	(lower_omp_target): Likewise.
	* passes.c (execute_function_todo): Likewise.
	(execute_todo, execute_one_pass): Likewise.
	(verify_curr_properties): Always define.
	* predict.c (tree_estimate_probability): Use flag_checking,
	CHECKING_P, gcc_checking_assert and checking_* functions to eliminate
	ENABLE_CHECKING conditionals.
	(propagate_freq): Likewise.
	* pretty-print.c (pp_format): Likewise.
	* real.c (real_to_decimal_for_mode): Likewise.
	* recog.c (split_all_insns): Likewise.
	* regcprop.c (kill_value_one_regno): Likewise.
	(copy_value): Likewise.
	(validate_value_data): Define unconditionally.
	* reload.c: Fix comment.
	* timevar.c: Include options.h.
	* tree-ssa.h (checking_verify_ssa): Define.
	* tree-ssa-loop-manip.h (checking_verify_loop_closed_ssa): Define.
	* sched-deps.c (CHECK): Remove unused macro.
	(add_or_update_dep_1, sd_add_dep): Use flag_checking, CHECKING_P,
	gcc_checking_assert and checking_* functions to eliminate
	ENABLE_CHECKING conditionals.
	* sel-sched-ir.c (free_regset_pool, tidy_control_flow): Likewise.
	* sel-sched.c (struct moveop_static_params): Likewise.
	(find_best_reg_for_expr, move_cond_jump): Likewise.
	(move_op_orig_expr_not_found): Likewise.
	(code_motion_process_successors, move_op): Likewise.
	* ssa-iterators.h (first_readonly_imm_use): Likewise.
	(next_readonly_imm_use): Likewise.
	* store-motion.c (compute_store_table): Likewise.
	* symbol-summary.h (function_summary::function_summary): Likewise.
	* target.h (cumulative_args_t): Likewise.
	(get_cumulative_args, pack_cumulative_args): Likewise.
	* timevar.c (timer::print): Likewise.
	* trans-mem.c (ipa_tm_execute): Likewise.
	* tree-cfg.c (move_stmt_op): Likewise.
	(move_sese_region_to_fn): Likewise.
	(gimple_flow_call_edges_add): Likewise.
	* tree-cfgcleanup.c (cleanup_tree_cfg_noloop, repair_loop_structures):
	Likewise.
	* tree-eh.c (remove_unreachable_handlers): Likewise.
	* tree-if-conv.c (pass_if_conversion::execute): Likewise.
	* tree-inline.c (expand_call_inline, optimize_inline_calls): Likewise.
	* tree-into-ssa.c (update_ssa): Likewise.
	* tree-loop-distribution.c (pass_loop_distribution::execute): Likewise.
	* tree-outof-ssa.c (eliminate_useless_phis, rewrite_trees): Likewise.
	* tree-parloops.c (pass_parallelize_loops::execute): Likewise.
	* tree-predcom.c (suitable_component_p): Likewise.
	* tree-profile.c (gimple_gen_const_delta_profiler): Likewise.
	* tree-ssa-alias.c (refs_may_alias_p_1): Likewise.
	* tree-ssa-live.c (verify_live_on_entry): Likewise.
	* tree-ssa-live.h (register_ssa_partition): Likewise.
	* tree-ssa-loop-ivcanon.c (tree_unroll_loops_completely): Likewise.
	* tree-ssa-loop-manip.c (add_exit_phi): Likewise.
	(tree_transform_and_unroll_loop): Likewise.
	* tree-ssa-math-opts.c (pass_cse_reciprocals::execute): Likewise.
	* tree-ssa-operands.c (get_expr_operands): Likewise.
	* tree-ssa-propagate.c (replace_exp_1): Likewise.
	* tree-ssa-structalias.c (rewrite_constraints): Likewise.
	* tree-ssa-ter.c (free_temp_expr_table): Likewise.
	* tree-ssa-threadupdate.c (duplicate_thread_path): Likewise.
	* tree-ssanames.c (release_ssa_name_fn): Likewise.
	* tree-stdarg.c (expand_ifn_va_arg): Likewise.
	* tree-vect-loop-manip.c
	(slpeel_tree_duplicate_loop_to_edge_cfg): Likewise.
	(slpeel_checking_verify_cfg_after_peeling): Likewise.
	(vect_do_peeling_for_loop_bound): Likewise.
	(vect_do_peeling_for_alignment): Likewise.
	* tree-vrp.c (supports_overflow_infinity): Likewise.
	(set_value_range): Likewise.
	* tree.c (free_lang_data_in_cgraph): Likewise.
	* value-prof.c (gimple_remove_histogram_value): Likewise.
	(free_hist): Likewise.
	* var-tracking.c (canonicalize_values_star): Likewise.
	(compute_bb_dataflow, vt_find_locations, vt_emit_notes): Likewise.

From-SVN: r229470
Mikhail Maltsev 2015-10-28 01:05:53 +00:00 committed by Jeff Law
parent 15517b4e79
commit b2b293775a
104 changed files with 918 additions and 919 deletions
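Nearly every hunk below applies the same mechanical transformation: compile-time #ifdef ENABLE_CHECKING blocks become run-time if (flag_checking) tests or gcc_checking_assert calls, and repeated "verify only when checking" sequences are wrapped in small inline checking_verify_* helpers. What follows is a minimal, self-contained sketch of that shape; the definitions are illustrative stand-ins for the real declarations in system.h and the option machinery, not the actual GCC code.

#include <assert.h>
#include <stdio.h>

/* Illustrative stand-ins: in GCC, CHECKING_P comes from system.h and is
   always defined to 0 or 1, and flag_checking is the -fchecking run-time
   flag.  */
#define CHECKING_P 1
static int flag_checking = CHECKING_P;
#define gcc_checking_assert(EXPR) \
  ((void) (CHECKING_P ? (assert (EXPR), 0) : 0))

/* Stand-in for an expensive internal verifier such as verify_flow_info.  */
static void
verify_flow_info (void)
{
  printf ("verifying the CFG\n");
}

/* Before the patch a caller wrote:

     #ifdef ENABLE_CHECKING
       verify_flow_info ();
     #endif

   After the patch the verifier is always compiled in and gated at run
   time, usually through a small inline wrapper like this one.  */
static inline void
checking_verify_flow_info (void)
{
  if (flag_checking)
    verify_flow_info ();
}

int
main (void)
{
  checking_verify_flow_info ();              /* replaces the #ifdef block  */
  gcc_checking_assert (flag_checking >= 0);  /* replaces #ifdef + assert   */
  return 0;
}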

gcc/attribs.c

@ -174,8 +174,58 @@ find_attribute_namespace (const char* ns)
return NULL;
}
/* Initialize attribute tables, and make some sanity checks
if --enable-checking. */
/* Make some sanity checks on the attribute tables. */
static void
check_attribute_tables (void)
{
for (size_t i = 0; i < ARRAY_SIZE (attribute_tables); i++)
for (size_t j = 0; attribute_tables[i][j].name != NULL; j++)
{
/* The name must not begin and end with __. */
const char *name = attribute_tables[i][j].name;
int len = strlen (name);
gcc_assert (!(name[0] == '_' && name[1] == '_'
&& name[len - 1] == '_' && name[len - 2] == '_'));
/* The minimum and maximum lengths must be consistent. */
gcc_assert (attribute_tables[i][j].min_length >= 0);
gcc_assert (attribute_tables[i][j].max_length == -1
|| (attribute_tables[i][j].max_length
>= attribute_tables[i][j].min_length));
/* An attribute cannot require both a DECL and a TYPE. */
gcc_assert (!attribute_tables[i][j].decl_required
|| !attribute_tables[i][j].type_required);
/* If an attribute requires a function type, in particular
it requires a type. */
gcc_assert (!attribute_tables[i][j].function_type_required
|| attribute_tables[i][j].type_required);
}
/* Check that each name occurs just once in each table. */
for (size_t i = 0; i < ARRAY_SIZE (attribute_tables); i++)
for (size_t j = 0; attribute_tables[i][j].name != NULL; j++)
for (size_t k = j + 1; attribute_tables[i][k].name != NULL; k++)
gcc_assert (strcmp (attribute_tables[i][j].name,
attribute_tables[i][k].name));
/* Check that no name occurs in more than one table. Names that
begin with '*' are exempt, and may be overridden. */
for (size_t i = 0; i < ARRAY_SIZE (attribute_tables); i++)
for (size_t j = i + 1; j < ARRAY_SIZE (attribute_tables); j++)
for (size_t k = 0; attribute_tables[i][k].name != NULL; k++)
for (size_t l = 0; attribute_tables[j][l].name != NULL; l++)
gcc_assert (attribute_tables[i][k].name[0] == '*'
|| strcmp (attribute_tables[i][k].name,
attribute_tables[j][l].name));
}
/* Initialize attribute tables, and make some sanity checks if checking is
enabled. */
void
init_attributes (void)
@ -195,62 +245,8 @@ init_attributes (void)
if (attribute_tables[i] == NULL)
attribute_tables[i] = empty_attribute_table;
#ifdef ENABLE_CHECKING
/* Make some sanity checks on the attribute tables. */
for (i = 0; i < ARRAY_SIZE (attribute_tables); i++)
{
int j;
for (j = 0; attribute_tables[i][j].name != NULL; j++)
{
/* The name must not begin and end with __. */
const char *name = attribute_tables[i][j].name;
int len = strlen (name);
gcc_assert (!(name[0] == '_' && name[1] == '_'
&& name[len - 1] == '_' && name[len - 2] == '_'));
/* The minimum and maximum lengths must be consistent. */
gcc_assert (attribute_tables[i][j].min_length >= 0);
gcc_assert (attribute_tables[i][j].max_length == -1
|| (attribute_tables[i][j].max_length
>= attribute_tables[i][j].min_length));
/* An attribute cannot require both a DECL and a TYPE. */
gcc_assert (!attribute_tables[i][j].decl_required
|| !attribute_tables[i][j].type_required);
/* If an attribute requires a function type, in particular
it requires a type. */
gcc_assert (!attribute_tables[i][j].function_type_required
|| attribute_tables[i][j].type_required);
}
}
/* Check that each name occurs just once in each table. */
for (i = 0; i < ARRAY_SIZE (attribute_tables); i++)
{
int j, k;
for (j = 0; attribute_tables[i][j].name != NULL; j++)
for (k = j + 1; attribute_tables[i][k].name != NULL; k++)
gcc_assert (strcmp (attribute_tables[i][j].name,
attribute_tables[i][k].name));
}
/* Check that no name occurs in more than one table. Names that
begin with '*' are exempt, and may be overridden. */
for (i = 0; i < ARRAY_SIZE (attribute_tables); i++)
{
size_t j, k, l;
for (j = i + 1; j < ARRAY_SIZE (attribute_tables); j++)
for (k = 0; attribute_tables[i][k].name != NULL; k++)
for (l = 0; attribute_tables[j][l].name != NULL; l++)
gcc_assert (attribute_tables[i][k].name[0] == '*'
|| strcmp (attribute_tables[i][k].name,
attribute_tables[j][l].name));
}
#endif
if (flag_checking)
check_attribute_tables ();
for (i = 0; i < ARRAY_SIZE (attribute_tables); ++i)
/* Put all the GNU attributes into the "gnu" namespace. */

gcc/cfgcleanup.c

@ -2873,11 +2873,8 @@ try_optimize_cfg (int mode)
to detect and fix during edge forwarding, and in some cases
is only visible after newly unreachable blocks are deleted,
which will be done in fixup_partitions. */
fixup_partitions ();
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
fixup_partitions ();
checking_verify_flow_info ();
}
changed_overall |= changed;

gcc/cfgexpand.c

@ -3269,12 +3269,13 @@ expand_computed_goto (tree exp)
static void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
/* Check for a nonlocal goto to a containing function. Should have
gotten translated to __builtin_nonlocal_goto. */
tree context = decl_function_context (label);
gcc_assert (!context || context == current_function_decl);
#endif
if (flag_checking)
{
/* Check for a nonlocal goto to a containing function. Should have
gotten translated to __builtin_nonlocal_goto. */
tree context = decl_function_context (label);
gcc_assert (!context || context == current_function_decl);
}
emit_jump (jump_target_rtx (label));
}
@ -5056,12 +5057,12 @@ expand_debug_expr (tree exp)
default:
flag_unsupported:
#ifdef ENABLE_CHECKING
debug_tree (exp);
gcc_unreachable ();
#else
if (flag_checking)
{
debug_tree (exp);
gcc_unreachable ();
}
return NULL;
#endif
}
}
@ -6422,9 +6423,7 @@ pass_expand::execute (function *fun)
gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
cleanup_cfg (CLEANUP_NO_INSN_DEL);
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
checking_verify_flow_info ();
/* Initialize pseudos allocated for hard registers. */
emit_initial_value_sets ();

gcc/cfghooks.h

@ -186,6 +186,18 @@ struct cfg_hooks
};
extern void verify_flow_info (void);
/* Check control flow invariants, if internal consistency checks are
enabled. */
static inline void
checking_verify_flow_info (void)
{
/* TODO: Add a separate option for -fchecking=cfg. */
if (flag_checking)
verify_flow_info ();
}
extern void dump_bb (FILE *, basic_block, int, int);
extern void dump_bb_for_graph (pretty_printer *, basic_block);
extern void dump_flow_info (FILE *, int);

gcc/cfgloop.h

@ -311,6 +311,16 @@ extern void delete_loop (struct loop *);
extern void verify_loop_structure (void);
/* Check loop structure invariants, if internal consistency checks are
enabled. */
static inline void
checking_verify_loop_structure (void)
{
if (flag_checking)
verify_loop_structure ();
}
/* Loop analysis. */
extern bool just_once_each_iteration_p (const struct loop *, const_basic_block);
gcov_type expected_loop_iterations_unbounded (const struct loop *);

gcc/cfgrtl.c

@ -2096,9 +2096,7 @@ commit_edge_insertions (void)
which will be done by fixup_partitions. */
fixup_partitions ();
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
checking_verify_flow_info ();
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
@ -3722,9 +3720,8 @@ fixup_reorder_chain (void)
insn = NEXT_INSN (insn);
set_last_insn (insn);
#ifdef ENABLE_CHECKING
verify_insn_chain ();
#endif
if (flag_checking)
verify_insn_chain ();
/* Now add jumps and labels as needed to match the blocks new
outgoing edges. */
@ -4312,9 +4309,7 @@ break_superblocks (void)
void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
checking_verify_flow_info ();
force_one_exit_fallthru ();
rtl_register_cfg_hooks ();
if (reload_completed && !targetm.have_epilogue ())
@ -4324,10 +4319,9 @@ cfg_layout_finalize (void)
rebuild_jump_labels (get_insns ());
delete_dead_jumptables ();
#ifdef ENABLE_CHECKING
verify_insn_chain ();
verify_flow_info ();
#endif
if (flag_checking)
verify_insn_chain ();
checking_verify_flow_info ();
}
@ -4892,13 +4886,11 @@ rtl_flow_call_edges_add (sbitmap blocks)
block in CFG already. Calling make_edge in such case would
cause us to mark that edge as fake and remove it later. */
#ifdef ENABLE_CHECKING
if (split_at_insn == BB_END (bb))
if (flag_checking && split_at_insn == BB_END (bb))
{
e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
gcc_assert (e == NULL);
}
#endif
/* Note that the following may create a new basic block
and renumber the existing basic blocks. */

gcc/cgraph.c

@ -832,11 +832,9 @@ symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
{
/* This is a rather expensive check possibly triggering
construction of call stmt hashtable. */
#ifdef ENABLE_CHECKING
cgraph_edge *e;
gcc_checking_assert (
!(e = caller->get_edge (call_stmt)) || e->speculative);
#endif
gcc_checking_assert (!(e = caller->get_edge (call_stmt))
|| e->speculative);
gcc_assert (is_gimple_call (call_stmt));
}
@ -1282,9 +1280,6 @@ cgraph_edge::redirect_call_stmt_to_callee (void)
gcall *new_stmt;
gimple_stmt_iterator gsi;
bool skip_bounds = false;
#ifdef ENABLE_CHECKING
cgraph_node *node;
#endif
if (e->speculative)
{
@ -1402,13 +1397,11 @@ cgraph_edge::redirect_call_stmt_to_callee (void)
&& !skip_bounds))
return e->call_stmt;
#ifdef ENABLE_CHECKING
if (decl)
if (flag_checking && decl)
{
node = cgraph_node::get (decl);
cgraph_node *node = cgraph_node::get (decl);
gcc_assert (!node || !node->clone.combined_args_to_skip);
}
#endif
if (symtab->dump_file)
{

gcc/cgraph.h

@ -362,7 +362,6 @@ public:
and NULL otherwise. */
static inline symtab_node *get (const_tree decl)
{
#ifdef ENABLE_CHECKING
/* Check that we are called for sane type of object - functions
and static or external variables. */
gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
@ -374,7 +373,6 @@ public:
memcpy/memset on the tree nodes. */
gcc_checking_assert (!decl->decl_with_vis.symtab_node
|| decl->decl_with_vis.symtab_node->decl == decl);
#endif
return decl->decl_with_vis.symtab_node;
}
@ -398,6 +396,9 @@ public:
/* Verify symbol table for internal consistency. */
static DEBUG_FUNCTION void verify_symtab_nodes (void);
/* Perform internal consistency checks, if they are enabled. */
static inline void checking_verify_symtab_nodes (void);
/* Type of the symbol. */
ENUM_BITFIELD (symtab_type) type : 8;
@ -558,6 +559,13 @@ private:
symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL);
};
inline void
symtab_node::checking_verify_symtab_nodes (void)
{
if (flag_checking)
symtab_node::verify_symtab_nodes ();
}
/* Walk all aliases for NODE. */
#define FOR_EACH_ALIAS(node, alias) \
for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
@ -1205,6 +1213,9 @@ public:
/* Verify whole cgraph structure. */
static void DEBUG_FUNCTION verify_cgraph_nodes (void);
/* Verify cgraph, if consistency checking is enabled. */
static inline void checking_verify_cgraph_nodes (void);
/* Worker to bring NODE local. */
static bool make_local (cgraph_node *node, void *);
@ -2753,6 +2764,15 @@ cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
return true;
}
/* Verify cgraph, if consistency checking is enabled. */
inline void
cgraph_node::checking_verify_cgraph_nodes (void)
{
if (flag_checking)
cgraph_node::verify_cgraph_nodes ();
}
/* Return true when variable can be removed from variable pool
if all direct calls are eliminated. */

gcc/cgraphclones.c

@ -1074,9 +1074,8 @@ symbol_table::materialize_all_clones (void)
if (symtab->dump_file)
fprintf (symtab->dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
cgraph_node::verify_cgraph_nodes ();
#endif
cgraph_node::checking_verify_cgraph_nodes ();
/* We can also do topological order, but number of iterations should be
bounded by number of IPA passes since single IPA pass is probably not
@ -1145,9 +1144,9 @@ symbol_table::materialize_all_clones (void)
node->clear_stmts_in_references ();
if (symtab->dump_file)
fprintf (symtab->dump_file, "Materialization Call site updates done.\n");
#ifdef ENABLE_CHECKING
cgraph_node::verify_cgraph_nodes ();
#endif
cgraph_node::checking_verify_cgraph_nodes ();
symtab->remove_unreachable_nodes (symtab->dump_file);
}

gcc/cgraphunit.c

@ -1325,13 +1325,12 @@ handle_alias_pairs (void)
static void
mark_functions_to_output (void)
{
cgraph_node *node;
#ifdef ENABLE_CHECKING
bool check_same_comdat_groups = false;
cgraph_node *node;
FOR_EACH_FUNCTION (node)
gcc_assert (!node->process);
#endif
if (flag_checking)
FOR_EACH_FUNCTION (node)
gcc_assert (!node->process);
FOR_EACH_FUNCTION (node)
{
@ -1365,15 +1364,14 @@ mark_functions_to_output (void)
}
else if (node->same_comdat_group)
{
#ifdef ENABLE_CHECKING
check_same_comdat_groups = true;
#endif
if (flag_checking)
check_same_comdat_groups = true;
}
else
{
/* We should've reclaimed all functions that are not needed. */
#ifdef ENABLE_CHECKING
if (!node->global.inlined_to
if (flag_checking
&& !node->global.inlined_to
&& gimple_has_body_p (decl)
/* FIXME: in ltrans unit when offline copy is outside partition but inline copies
are inside partition, we can end up not removing the body since we no longer
@ -1386,7 +1384,6 @@ mark_functions_to_output (void)
node->debug ();
internal_error ("failed to reclaim unneeded function");
}
#endif
gcc_assert (node->global.inlined_to
|| !gimple_has_body_p (decl)
|| node->in_other_partition
@ -1397,8 +1394,7 @@ mark_functions_to_output (void)
}
}
#ifdef ENABLE_CHECKING
if (check_same_comdat_groups)
if (flag_checking && check_same_comdat_groups)
FOR_EACH_FUNCTION (node)
if (node->same_comdat_group && !node->process)
{
@ -1418,7 +1414,6 @@ mark_functions_to_output (void)
"comdat group");
}
}
#endif
}
/* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
@ -1887,9 +1882,7 @@ cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
TREE_ASM_WRITTEN (thunk_fndecl) = false;
delete_unreachable_blocks ();
update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
checking_verify_flow_info ();
free_dominance_info (CDI_DOMINATORS);
/* Since we want to emit the thunk, we explicitly mark its name as
@ -2373,9 +2366,7 @@ symbol_table::compile (void)
if (seen_error ())
return;
#ifdef ENABLE_CHECKING
symtab_node::verify_symtab_nodes ();
#endif
symtab_node::checking_verify_symtab_nodes ();
timevar_push (TV_CGRAPHOPT);
if (pre_ipa_mem_report)
@ -2424,9 +2415,7 @@ symbol_table::compile (void)
(*debug_hooks->assembly_start) ();
if (!quiet_flag)
fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
symtab_node::verify_symtab_nodes ();
#endif
symtab_node::checking_verify_symtab_nodes ();
materialize_all_clones ();
bitmap_obstack_initialize (NULL);
@ -2482,7 +2471,8 @@ symbol_table::compile (void)
fprintf (dump_file, "\nFinal ");
symtab_node::dump_table (dump_file);
}
#ifdef ENABLE_CHECKING
if (!flag_checking)
return;
symtab_node::verify_symtab_nodes ();
/* Double check that all inline clones are gone and that all
function bodies have been released from memory. */
@ -2501,7 +2491,6 @@ symbol_table::compile (void)
if (error_found)
internal_error ("nodes with unreleased memory found");
}
#endif
}

gcc/ddg.c

@ -300,19 +300,16 @@ add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)
rtx_insn *def_insn = DF_REF_INSN (last_def);
ddg_node_ptr last_def_node = get_node_of_insn (g, def_insn);
ddg_node_ptr use_node;
#ifdef ENABLE_CHECKING
struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
#endif
df_ref first_def = df_bb_regno_first_def_find (g->bb, regno);
gcc_assert (last_def_node);
gcc_assert (first_def);
#ifdef ENABLE_CHECKING
if (DF_REF_ID (last_def) != DF_REF_ID (first_def))
gcc_assert (!bitmap_bit_p (&bb_info->gen,
DF_REF_ID (first_def)));
#endif
if (flag_checking && DF_REF_ID (last_def) != DF_REF_ID (first_def))
{
struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
gcc_assert (!bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)));
}
/* Create inter-loop true dependences and anti dependences. */
for (r_use = DF_REF_CHAIN (last_def); r_use != NULL; r_use = r_use->next)
@ -1013,7 +1010,6 @@ order_sccs (ddg_all_sccs_ptr g)
(int (*) (const void *, const void *)) compare_sccs);
}
#ifdef ENABLE_CHECKING
/* Check that every node in SCCS belongs to exactly one strongly connected
component and that no element of SCCS is empty. */
static void
@ -1033,7 +1029,6 @@ check_sccs (ddg_all_sccs_ptr sccs, int num_nodes)
}
sbitmap_free (tmp);
}
#endif
/* Perform the Strongly Connected Components decomposing algorithm on the
DDG and return DDG_ALL_SCCS structure that contains them. */
@ -1079,9 +1074,10 @@ create_ddg_all_sccs (ddg_ptr g)
sbitmap_free (from);
sbitmap_free (to);
sbitmap_free (scc_nodes);
#ifdef ENABLE_CHECKING
check_sccs (sccs, num_nodes);
#endif
if (flag_checking)
check_sccs (sccs, num_nodes);
return sccs;
}

gcc/df-core.c

@ -682,10 +682,8 @@ df_finish_pass (bool verify ATTRIBUTE_UNUSED)
#endif
#endif
#ifdef ENABLE_CHECKING
if (verify)
if (flag_checking && verify)
df->changeable_flags |= DF_VERIFY_SCHEDULED;
#endif
}
@ -1273,12 +1271,14 @@ df_analyze (void)
for (i = 0; i < df->n_blocks; i++)
bitmap_set_bit (current_all_blocks, df->postorder[i]);
#ifdef ENABLE_CHECKING
/* Verify that POSTORDER_INVERTED only contains blocks reachable from
the ENTRY block. */
for (i = 0; i < df->n_blocks_inverted; i++)
gcc_assert (bitmap_bit_p (current_all_blocks, df->postorder_inverted[i]));
#endif
if (flag_checking)
{
/* Verify that POSTORDER_INVERTED only contains blocks reachable from
the ENTRY block. */
for (i = 0; i < df->n_blocks_inverted; i++)
gcc_assert (bitmap_bit_p (current_all_blocks,
df->postorder_inverted[i]));
}
/* Make sure that we have pruned any unreachable blocks from these
sets. */

gcc/diagnostic-core.h

@ -48,7 +48,7 @@ extern const char *trim_filename (const char *);
/* None of these functions are suitable for ATTRIBUTE_PRINTF, because
each language front end can extend them with its own set of format
specifiers. We must use custom format checks. */
#if (ENABLE_CHECKING && GCC_VERSION >= 4001) || GCC_VERSION == BUILDING_GCC_VERSION
#if (CHECKING_P && GCC_VERSION >= 4001) || GCC_VERSION == BUILDING_GCC_VERSION
#define ATTRIBUTE_GCC_DIAG(m, n) __attribute__ ((__format__ (GCC_DIAG_STYLE, m, n))) ATTRIBUTE_NONNULL(m)
#else
#define ATTRIBUTE_GCC_DIAG(m, n) ATTRIBUTE_NONNULL(m)
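The hunk above relies on CHECKING_P being always defined, to either 0 or 1 (unlike ENABLE_CHECKING, which is only defined when checking is enabled), so it composes with other terms in an arithmetic #if without depending on the preprocessor's rule that undefined identifiers evaluate to 0. A small illustrative sketch, with made-up values rather than GCC's real configury:

#include <stdio.h>

/* Illustrative values; in GCC these come from configure and system.h.  */
#define CHECKING_P 0
#define GCC_VERSION 4009

/* CHECKING_P is always defined to 0 or 1, so it can be combined with
   other terms in an arithmetic #if, as in the diagnostic-core.h hunk.  */
#if (CHECKING_P && GCC_VERSION >= 4001)
#  define EXAMPLE_DIAG_NOTE "extra diagnostic format checking enabled"
#else
#  define EXAMPLE_DIAG_NOTE "extra diagnostic format checking disabled"
#endif

int
main (void)
{
  puts (EXAMPLE_DIAG_NOTE);
  return 0;
}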

gcc/diagnostic.c

@ -736,12 +736,12 @@ diagnostic_report_diagnostic (diagnostic_context *context,
if (diagnostic->kind == DK_ICE || diagnostic->kind == DK_ICE_NOBT)
{
#ifndef ENABLE_CHECKING
/* When not checking, ICEs are converted to fatal errors when an
error has already occurred. This is counteracted by
abort_on_error. */
if ((diagnostic_kind_count (context, DK_ERROR) > 0
|| diagnostic_kind_count (context, DK_SORRY) > 0)
if (!CHECKING_P
&& (diagnostic_kind_count (context, DK_ERROR) > 0
|| diagnostic_kind_count (context, DK_SORRY) > 0)
&& !context->abort_on_error)
{
expanded_location s
@ -750,7 +750,6 @@ diagnostic_report_diagnostic (diagnostic_context *context,
s.file, s.line);
exit (ICE_EXIT_CODE);
}
#endif
if (context->internal_error)
(*context->internal_error) (context,
diagnostic->message.format_spec,

gcc/dominance.c

@ -634,9 +634,7 @@ calculate_dominance_info (cdi_direction dir)
if (dom_computed[dir_index] == DOM_OK)
{
#if ENABLE_CHECKING
verify_dominators (dir);
#endif
checking_verify_dominators (dir);
return;
}
@ -665,11 +663,7 @@ calculate_dominance_info (cdi_direction dir)
dom_computed[dir_index] = DOM_NO_FAST_QUERY;
}
else
{
#if ENABLE_CHECKING
verify_dominators (dir);
#endif
}
checking_verify_dominators (dir);
compute_dom_fast_query (dir);

gcc/dominance.h

@ -60,6 +60,17 @@ extern bool dominated_by_p (enum cdi_direction, const_basic_block,
unsigned bb_dom_dfs_in (enum cdi_direction, basic_block);
unsigned bb_dom_dfs_out (enum cdi_direction, basic_block);
extern void verify_dominators (enum cdi_direction);
/* Verify invariants of computed dominance information, if internal consistency
checks are enabled. */
static inline void
checking_verify_dominators (cdi_direction dir)
{
if (flag_checking)
verify_dominators (dir);
}
basic_block recompute_dominator (enum cdi_direction, basic_block);
extern void iterate_fix_dominators (enum cdi_direction,
vec<basic_block> , bool);

gcc/dwarf2out.c

@ -4149,15 +4149,12 @@ static inline void
add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
{
dw_attr_node attr;
gcc_checking_assert (targ_die != NULL);
#ifdef ENABLE_CHECKING
gcc_assert (targ_die != NULL);
#else
/* With LTO we can end up trying to reference something we didn't create
a DIE for. Avoid crashing later on a NULL referenced DIE. */
if (targ_die == NULL)
return;
#endif
attr.dw_attr = attr_kind;
attr.dw_attr_val.val_class = dw_val_class_die_ref;
@ -5723,7 +5720,6 @@ debug_dwarf (void)
print_die (comp_unit_die (), stderr);
}
#ifdef ENABLE_CHECKING
/* Sanity checks on DIEs. */
static void
@ -5786,7 +5782,6 @@ check_die (dw_die_ref die)
&& a->dw_attr != DW_AT_GNU_all_call_sites);
}
}
#endif
/* Start a new compilation unit DIE for an include file. OLD_UNIT is the CU
for the enclosing include file, if any. BINCL_DIE is the DW_TAG_GNU_BINCL
@ -11763,14 +11758,14 @@ const_ok_for_output_1 (rtx rtl)
{
/* If delegitimize_address couldn't do anything with the UNSPEC, assume
we can't express it in the debug info. */
#ifdef ENABLE_CHECKING
/* Don't complain about TLS UNSPECs, those are just too hard to
delegitimize. Note this could be a non-decl SYMBOL_REF such as
one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
rather than DECL_THREAD_LOCAL_P is not just an optimization. */
if (XVECLEN (rtl, 0) == 0
|| GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
|| SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE)
if (flag_checking
&& (XVECLEN (rtl, 0) == 0
|| GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
|| SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
inform (current_function_decl
? DECL_SOURCE_LOCATION (current_function_decl)
: UNKNOWN_LOCATION,
@ -11782,7 +11777,6 @@ const_ok_for_output_1 (rtx rtl)
#else
"non-delegitimized UNSPEC %d found in variable location",
XINT (rtl, 1));
#endif
#endif
expansion_failed (NULL_TREE, rtl,
"UNSPEC hasn't been delegitimized.\n");
@ -13570,12 +13564,12 @@ mem_loc_descriptor (rtx rtl, machine_mode mode,
goto symref;
default:
#ifdef ENABLE_CHECKING
print_rtl (stderr, rtl);
gcc_unreachable ();
#else
if (flag_checking)
{
print_rtl (stderr, rtl);
gcc_unreachable ();
}
break;
#endif
}
if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
@ -15098,15 +15092,14 @@ loc_list_from_tree (tree loc, int want_address,
return 0;
}
#ifdef ENABLE_CHECKING
/* Otherwise this is a generic code; we should just lists all of
these explicitly. We forgot one. */
gcc_unreachable ();
#else
if (flag_checking)
gcc_unreachable ();
/* In a release build, we want to degrade gracefully: better to
generate incomplete debugging information than to crash. */
return NULL;
#endif
}
if (!ret && !list_ret)
@ -19908,18 +19901,17 @@ gen_lexical_block_die (tree stmt, dw_die_ref context_die)
{
if (old_die)
{
#ifdef ENABLE_CHECKING
/* This must have been generated early and it won't even
need location information since it's a DW_AT_inline
function. */
for (dw_die_ref c = context_die; c; c = c->die_parent)
if (c->die_tag == DW_TAG_inlined_subroutine
|| c->die_tag == DW_TAG_subprogram)
{
gcc_assert (get_AT (c, DW_AT_inline));
break;
}
#endif
if (flag_checking)
for (dw_die_ref c = context_die; c; c = c->die_parent)
if (c->die_tag == DW_TAG_inlined_subroutine
|| c->die_tag == DW_TAG_subprogram)
{
gcc_assert (get_AT (c, DW_AT_inline));
break;
}
return;
}
}
@ -20736,10 +20728,8 @@ gen_type_die_with_usage (tree type, dw_die_ref context_die,
if (type == NULL_TREE || type == error_mark_node)
return;
#ifdef ENABLE_CHECKING
if (type)
if (flag_checking && type)
verify_type (type);
#endif
if (TYPE_NAME (type) != NULL_TREE
&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
@ -20933,11 +20923,12 @@ gen_type_die (tree type, dw_die_ref context_die)
if (type != error_mark_node)
{
gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
#ifdef ENABLE_CHECKING
dw_die_ref die = lookup_type_die (type);
if (die)
check_die (die);
#endif
if (flag_checking)
{
dw_die_ref die = lookup_type_die (type);
if (die)
check_die (die);
}
}
}
@ -21975,11 +21966,12 @@ dwarf2out_decl (tree decl)
gen_decl_die (decl, NULL, context_die);
#ifdef ENABLE_CHECKING
dw_die_ref die = lookup_decl_die (decl);
if (die)
check_die (die);
#endif
if (flag_checking)
{
dw_die_ref die = lookup_decl_die (decl);
if (die)
check_die (die);
}
}
/* Write the debugging output for DECL. */

gcc/emit-rtl.c

@ -2733,8 +2733,7 @@ verify_rtx_sharing (rtx orig, rtx insn)
/* This rtx may not be shared. If it has already been seen,
replace it with a copy of itself. */
#ifdef ENABLE_CHECKING
if (RTX_FLAG (x, used))
if (flag_checking && RTX_FLAG (x, used))
{
error ("invalid rtl sharing found in the insn");
debug_rtx (insn);
@ -2742,7 +2741,6 @@ verify_rtx_sharing (rtx orig, rtx insn)
debug_rtx (x);
internal_error ("internal consistency failure");
}
#endif
gcc_assert (!RTX_FLAG (x, used));
RTX_FLAG (x, used) = 1;
@ -4259,12 +4257,12 @@ delete_insns_since (rtx_insn *from)
void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
#ifdef ENABLE_CHECKING
rtx_insn *x;
for (x = from; x != to; x = NEXT_INSN (x))
gcc_assert (after != x);
gcc_assert (after != to);
#endif
if (flag_checking)
{
for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
gcc_assert (after != x);
gcc_assert (after != to);
}
/* Splice this bunch out of where it is now. */
if (PREV_INSN (from))

gcc/et-forest.c

@ -28,7 +28,7 @@ License along with libiberty; see the file COPYING3. If not see
#include "alloc-pool.h"
#include "et-forest.h"
/* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
/* We do not enable this with CHECKING_P, since it is awfully slow. */
#undef DEBUG_ET
#ifdef DEBUG_ET

gcc/except.c

@ -612,9 +612,8 @@ duplicate_eh_regions (struct function *ifun,
struct duplicate_eh_regions_data data;
eh_region outer_region;
#ifdef ENABLE_CHECKING
verify_eh_tree (ifun);
#endif
if (flag_checking)
verify_eh_tree (ifun);
data.label_map = map;
data.label_map_data = map_data;
@ -632,9 +631,8 @@ duplicate_eh_regions (struct function *ifun,
duplicate_eh_regions_1 (&data, r, outer_region);
}
#ifdef ENABLE_CHECKING
verify_eh_tree (cfun);
#endif
if (flag_checking)
verify_eh_tree (cfun);
return data.eh_map;
}

gcc/fwprop.c

@ -843,9 +843,7 @@ all_uses_available_at (rtx_insn *def_insn, rtx_insn *target_insn)
static df_ref *active_defs;
#ifdef ENABLE_CHECKING
static sparseset active_defs_check;
#endif
/* Fill the ACTIVE_DEFS array with the use->def link for the registers
mentioned in USE_REC. Register the valid entries in ACTIVE_DEFS_CHECK
@ -859,9 +857,8 @@ register_active_defs (df_ref use)
df_ref def = get_def_for_use (use);
int regno = DF_REF_REGNO (use);
#ifdef ENABLE_CHECKING
sparseset_set_bit (active_defs_check, regno);
#endif
if (flag_checking)
sparseset_set_bit (active_defs_check, regno);
active_defs[regno] = def;
}
}
@ -876,9 +873,8 @@ register_active_defs (df_ref use)
static void
update_df_init (rtx_insn *def_insn, rtx_insn *insn)
{
#ifdef ENABLE_CHECKING
sparseset_clear (active_defs_check);
#endif
if (flag_checking)
sparseset_clear (active_defs_check);
register_active_defs (DF_INSN_USES (def_insn));
register_active_defs (DF_INSN_USES (insn));
register_active_defs (DF_INSN_EQ_USES (insn));
@ -899,9 +895,7 @@ update_uses (df_ref use)
if (DF_REF_ID (use) >= (int) use_def_ref.length ())
use_def_ref.safe_grow_cleared (DF_REF_ID (use) + 1);
#ifdef ENABLE_CHECKING
gcc_assert (sparseset_bit_p (active_defs_check, regno));
#endif
gcc_checking_assert (sparseset_bit_p (active_defs_check, regno));
use_def_ref[DF_REF_ID (use)] = active_defs[regno];
}
}
@ -1407,9 +1401,8 @@ fwprop_init (void)
df_set_flags (DF_DEFER_INSN_RESCAN);
active_defs = XNEWVEC (df_ref, max_reg_num ());
#ifdef ENABLE_CHECKING
active_defs_check = sparseset_alloc (max_reg_num ());
#endif
if (flag_checking)
active_defs_check = sparseset_alloc (max_reg_num ());
}
static void
@ -1419,9 +1412,8 @@ fwprop_done (void)
use_def_ref.release ();
free (active_defs);
#ifdef ENABLE_CHECKING
sparseset_free (active_defs_check);
#endif
if (flag_checking)
sparseset_free (active_defs_check);
free_dominance_info (CDI_DOMINATORS);
cleanup_cfg (0);

gcc/ggc-page.c

@ -2201,12 +2201,11 @@ ggc_collect (void)
void
ggc_grow (void)
{
#ifndef ENABLE_CHECKING
G.allocated_last_gc = MAX (G.allocated_last_gc,
G.allocated);
#else
ggc_collect ();
#endif
if (!flag_checking)
G.allocated_last_gc = MAX (G.allocated_last_gc,
G.allocated);
else
ggc_collect ();
if (!quiet_flag)
fprintf (stderr, " {GC start %luk} ", (unsigned long) G.allocated / 1024);
}

gcc/gimplify.c

@ -10319,10 +10319,8 @@ gimplify_body (tree fndecl, bool do_parms)
pop_gimplify_context (outer_bind);
gcc_assert (gimplify_ctxp == NULL);
#ifdef ENABLE_CHECKING
if (!seen_error ())
if (flag_checking && !seen_error ())
verify_gimple_in_seq (gimple_bind_body (outer_bind));
#endif
timevar_pop (TV_TREE_GIMPLIFY);
input_location = saved_location;
@ -10614,11 +10612,9 @@ gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
if (!operand_equal_p (t1, t2, 0))
return false;
#ifdef ENABLE_CHECKING
/* Only allow them to compare equal if they also hash equal; otherwise
results are nondeterminate, and we fail bootstrap comparison. */
gcc_assert (hash (p1) == hash (p2));
#endif
gcc_checking_assert (hash (p1) == hash (p2));
return true;
}

gcc/graphite-isl-ast-to-gimple.c

@ -106,10 +106,8 @@ gmp_cst_to_tree (tree type, mpz_t val)
static inline void
graphite_verify (void)
{
#ifdef ENABLE_CHECKING
verify_loop_structure ();
verify_loop_closed_ssa (true);
#endif
checking_verify_loop_structure ();
checking_verify_loop_closed_ssa (true);
}
/* IVS_PARAMS maps ISL's scattering and parameter identifiers

gcc/graphite-scop-detection.c

@ -259,21 +259,16 @@ canonicalize_loop_closed_ssa (loop_p loop)
static void
canonicalize_loop_closed_ssa_form (void)
{
checking_verify_loop_closed_ssa (true);
loop_p loop;
#ifdef ENABLE_CHECKING
verify_loop_closed_ssa (true);
#endif
FOR_EACH_LOOP (loop, 0)
canonicalize_loop_closed_ssa (loop);
rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
verify_loop_closed_ssa (true);
#endif
checking_verify_loop_closed_ssa (true);
}
/* Can all ivs be represented by a signed integer?

gcc/graphite-sese-to-poly.c

@ -1522,9 +1522,7 @@ rewrite_reductions_out_of_ssa (scop_p scop)
}
update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
verify_loop_closed_ssa (true);
#endif
checking_verify_loop_closed_ssa (true);
}
/* Rewrite the scalar dependence of DEF used in USE_STMT with a memory
@ -1699,9 +1697,7 @@ rewrite_cross_bb_scalar_deps_out_of_ssa (scop_p scop)
{
scev_reset_htab ();
update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
verify_loop_closed_ssa (true);
#endif
checking_verify_loop_closed_ssa (true);
}
}

gcc/hash-table.h

@ -638,9 +638,7 @@ hash_table<Descriptor, Allocator>::find_empty_slot_for_expand (hashval_t hash)
if (is_empty (*slot))
return slot;
#ifdef ENABLE_CHECKING
gcc_checking_assert (!is_deleted (*slot));
#endif
hash2 = hash_table_mod2 (hash, m_size_prime_index);
for (;;)
@ -652,9 +650,7 @@ hash_table<Descriptor, Allocator>::find_empty_slot_for_expand (hashval_t hash)
slot = m_entries + index;
if (is_empty (*slot))
return slot;
#ifdef ENABLE_CHECKING
gcc_checking_assert (!is_deleted (*slot));
#endif
}
}

gcc/ifcvt.c

@ -5093,9 +5093,7 @@ if_convert (bool after_combine)
if (optimize == 1)
df_remove_problem (df_live);
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
checking_verify_flow_info ();
}
/* If-conversion and CFG cleanup. */

gcc/ipa-cp.c

@ -2920,9 +2920,8 @@ ipcp_propagate_stage (struct ipa_topo_info *topo)
overall_size, max_new_size);
propagate_constants_topo (topo);
#ifdef ENABLE_CHECKING
ipcp_verify_propagated_values ();
#endif
if (flag_checking)
ipcp_verify_propagated_values ();
topo->constants.propagate_effects ();
topo->contexts.propagate_effects ();

gcc/ipa-devirt.c

@ -272,11 +272,10 @@ type_in_anonymous_namespace_p (const_tree t)
{
/* C++ FE uses magic <anon> as assembler names of anonymous types.
verify that this match with type_in_anonymous_namespace_p. */
#ifdef ENABLE_CHECKING
if (in_lto_p)
gcc_assert (!strcmp ("<anon>",
IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (TYPE_NAME (t)))));
#endif
gcc_checking_assert (!strcmp ("<anon>",
IDENTIFIER_POINTER
(DECL_ASSEMBLER_NAME (TYPE_NAME (t)))));
return true;
}
return false;
@ -300,15 +299,13 @@ odr_type_p (const_tree t)
if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
&& (DECL_ASSEMBLER_NAME_SET_P (TYPE_NAME (t))))
{
#ifdef ENABLE_CHECKING
/* C++ FE uses magic <anon> as assembler names of anonymous types.
verify that this match with type_in_anonymous_namespace_p. */
gcc_assert (!type_with_linkage_p (t)
|| strcmp ("<anon>",
IDENTIFIER_POINTER
(DECL_ASSEMBLER_NAME (TYPE_NAME (t))))
|| type_in_anonymous_namespace_p (t));
#endif
gcc_checking_assert (!type_with_linkage_p (t)
|| strcmp ("<anon>",
IDENTIFIER_POINTER
(DECL_ASSEMBLER_NAME (TYPE_NAME (t))))
|| type_in_anonymous_namespace_p (t));
return true;
}
return false;
@ -1777,11 +1774,10 @@ odr_types_equivalent_p (tree t1, tree t2, bool warn, bool *warned,
bool
odr_types_equivalent_p (tree type1, tree type2)
{
hash_set<type_pair> visited;
gcc_checking_assert (odr_or_derived_type_p (type1)
&& odr_or_derived_type_p (type2));
#ifdef ENABLE_CHECKING
gcc_assert (odr_or_derived_type_p (type1) && odr_or_derived_type_p (type2));
#endif
hash_set<type_pair> visited;
return odr_types_equivalent_p (type1, type2, false, NULL,
&visited, UNKNOWN_LOCATION, UNKNOWN_LOCATION);
}
@ -2000,8 +1996,8 @@ add_type_duplicate (odr_type val, tree type)
}
gcc_assert (val->odr_violated || !odr_must_violate);
/* Sanity check that all bases will be build same way again. */
#ifdef ENABLE_CHECKING
if (COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
if (flag_checking
&& COMPLETE_TYPE_P (type) && COMPLETE_TYPE_P (val->type)
&& TREE_CODE (val->type) == RECORD_TYPE
&& TREE_CODE (type) == RECORD_TYPE
&& TYPE_BINFO (val->type) && TYPE_BINFO (type)
@ -2030,7 +2026,6 @@ add_type_duplicate (odr_type val, tree type)
j++;
}
}
#endif
/* Regularize things a little. During LTO same types may come with
@ -2136,8 +2131,8 @@ get_odr_type (tree type, bool insert)
if (slot && *slot)
{
val = *slot;
#ifdef ENABLE_CHECKING
if (in_lto_p && can_be_vtable_hashed_p (type))
if (flag_checking
&& in_lto_p && can_be_vtable_hashed_p (type))
{
hash = hash_odr_vtable (type);
vtable_slot = odr_vtable_hash->find_slot_with_hash (type, hash,
@ -2145,7 +2140,6 @@ get_odr_type (tree type, bool insert)
gcc_assert (!vtable_slot || *vtable_slot == *slot);
vtable_slot = NULL;
}
#endif
}
else if (*vtable_slot)
val = *vtable_slot;

gcc/ipa-icf.c

@ -2599,7 +2599,7 @@ sem_item_optimizer::execute (void)
dump_cong_classes ();
process_cong_reduction ();
verify_classes ();
checking_verify_classes ();
if (dump_file)
fprintf (dump_file, "Dump after callgraph-based congruence reduction\n");
@ -2618,7 +2618,7 @@ sem_item_optimizer::execute (void)
process_cong_reduction ();
dump_cong_classes ();
verify_classes ();
checking_verify_classes ();
bool merged_p = merge_classes (prev_class_count);
if (dump_file && (dump_flags & TDF_DETAILS))
@ -2883,7 +2883,7 @@ sem_item_optimizer::subdivide_classes_by_equality (bool in_wpa)
}
}
verify_classes ();
checking_verify_classes ();
}
/* Subdivide classes by address references that members of the class
@ -2977,12 +2977,20 @@ sem_item_optimizer::subdivide_classes_by_sensitive_refs ()
return newly_created_classes;
}
/* Verify congruence classes if checking is enabled. */
/* Verify congruence classes, if checking is enabled. */
void
sem_item_optimizer::checking_verify_classes (void)
{
if (flag_checking)
verify_classes ();
}
/* Verify congruence classes. */
void
sem_item_optimizer::verify_classes (void)
{
#if ENABLE_CHECKING
for (hash_table <congruence_class_group_hash>::iterator it = m_classes.begin ();
it != m_classes.end (); ++it)
{
@ -2990,26 +2998,25 @@ sem_item_optimizer::verify_classes (void)
{
congruence_class *cls = (*it)->classes[i];
gcc_checking_assert (cls);
gcc_checking_assert (cls->members.length () > 0);
gcc_assert (cls);
gcc_assert (cls->members.length () > 0);
for (unsigned int j = 0; j < cls->members.length (); j++)
{
sem_item *item = cls->members[j];
gcc_checking_assert (item);
gcc_checking_assert (item->cls == cls);
gcc_assert (item);
gcc_assert (item->cls == cls);
for (unsigned k = 0; k < item->usages.length (); k++)
{
sem_usage_pair *usage = item->usages[k];
gcc_checking_assert (usage->item->index_in_class <
usage->item->cls->members.length ());
gcc_assert (usage->item->index_in_class <
usage->item->cls->members.length ());
}
}
}
}
#endif
}
/* Disposes split map traverse function. CLS_PTR is pointer to congruence
@ -3054,10 +3061,11 @@ sem_item_optimizer::traverse_congruence_split (congruence_class * const &cls,
add_item_to_class (tc, cls->members[i]);
}
#ifdef ENABLE_CHECKING
for (unsigned int i = 0; i < 2; i++)
gcc_checking_assert (newclasses[i]->members.length ());
#endif
if (flag_checking)
{
for (unsigned int i = 0; i < 2; i++)
gcc_assert (newclasses[i]->members.length ());
}
if (splitter_cls == cls)
optimizer->splitter_class_removed = true;
@ -3152,11 +3160,9 @@ sem_item_optimizer::do_congruence_step_for_index (congruence_class *cls,
else
b = *slot;
#if ENABLE_CHECKING
gcc_checking_assert (usage->item->cls);
gcc_checking_assert (usage->item->index_in_class <
usage->item->cls->members.length ());
#endif
bitmap_set_bit (b, usage->item->index_in_class);
}

gcc/ipa-icf.h

@ -479,6 +479,9 @@ public:
void dump (void);
/* Verify congruence classes if checking is enabled. */
void checking_verify_classes (void);
/* Verify congruence classes. */
void verify_classes (void);
/* Write IPA ICF summary for symbols. */

gcc/ipa-inline-analysis.c

@ -2964,10 +2964,12 @@ compute_inline_parameters (struct cgraph_node *node, bool early)
info->size = info->self_size;
info->stack_frame_offset = 0;
info->estimated_stack_size = info->estimated_self_stack_size;
#ifdef ENABLE_CHECKING
inline_update_overall_summary (node);
gcc_assert (info->time == info->self_time && info->size == info->self_size);
#endif
if (flag_checking)
{
inline_update_overall_summary (node);
gcc_assert (info->time == info->self_time
&& info->size == info->self_size);
}
pop_cfun ();
}

gcc/ipa-inline-transform.c

@ -491,10 +491,9 @@ save_inline_function_body (struct cgraph_node *node)
first_clone->remove_symbol_and_inline_clones ();
first_clone = NULL;
}
#ifdef ENABLE_CHECKING
else
else if (flag_checking)
first_clone->verify ();
#endif
return first_clone;
}

gcc/ipa-inline.c

@ -1878,7 +1878,7 @@ inline_small_functions (void)
if (!edge->inline_failed || !edge->callee->analyzed)
continue;
#ifdef ENABLE_CHECKING
#if CHECKING_P
/* Be sure that caches are maintained consistent. */
sreal cached_badness = edge_badness (edge, false);
@ -2632,9 +2632,8 @@ early_inliner (function *fun)
if (ipa_node_params_sum)
return 0;
#ifdef ENABLE_CHECKING
node->verify ();
#endif
if (flag_checking)
node->verify ();
node->remove_all_references ();
/* Rebuild this reference because it dosn't depend on

gcc/ipa-inline.h

@ -299,10 +299,8 @@ estimate_edge_size (struct cgraph_edge *edge)
static inline int
estimate_edge_growth (struct cgraph_edge *edge)
{
#ifdef ENABLE_CHECKING
gcc_checking_assert (inline_edge_summary (edge)->call_stmt_size
|| !edge->callee->analyzed);
#endif
return (estimate_edge_size (edge)
- inline_edge_summary (edge)->call_stmt_size);
}

gcc/ipa-visibility.c

@ -464,16 +464,15 @@ function_and_variable_visibility (bool whole_program)
what comdat group they are in when they won't be emitted in this TU. */
if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
{
#ifdef ENABLE_CHECKING
symtab_node *n;
for (n = node->same_comdat_group;
n != node;
n = n->same_comdat_group)
/* If at least one of same comdat group functions is external,
all of them have to be, otherwise it is a front-end bug. */
gcc_assert (DECL_EXTERNAL (n->decl));
#endif
if (flag_checking)
{
for (symtab_node *n = node->same_comdat_group;
n != node;
n = n->same_comdat_group)
/* If at least one of same comdat group functions is external,
all of them have to be, otherwise it is a front-end bug. */
gcc_assert (DECL_EXTERNAL (n->decl));
}
node->dissolve_same_comdat_group_list ();
}
gcc_assert ((!DECL_WEAK (node->decl)


@ -319,12 +319,13 @@ symbol_table::remove_unreachable_nodes (FILE *file)
build_type_inheritance_graph ();
if (file)
fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
FOR_EACH_FUNCTION (node)
gcc_assert (!node->aux);
FOR_EACH_VARIABLE (vnode)
gcc_assert (!vnode->aux);
#endif
if (flag_checking)
{
FOR_EACH_FUNCTION (node)
gcc_assert (!node->aux);
FOR_EACH_VARIABLE (vnode)
gcc_assert (!vnode->aux);
}
/* Mark functions whose bodies are obviously needed.
This is mostly when they can be referenced externally. Inline clones
are special since their declarations are shared with master clone and thus
@ -678,9 +679,7 @@ symbol_table::remove_unreachable_nodes (FILE *file)
if (file)
fprintf (file, "\n");
#ifdef ENABLE_CHECKING
symtab_node::verify_symtab_nodes ();
#endif
symtab_node::checking_verify_symtab_nodes ();
/* If we removed something, perhaps profile could be improved. */
if (changed && optimize && inline_edge_summary_vec.exists ())
@ -1370,13 +1369,12 @@ ipa_single_use (void)
{
if (var->aux != BOTTOM)
{
#ifdef ENABLE_CHECKING
/* Not having the single user known means that the VAR is
unreachable. Either someone forgot to remove unreachable
variables or the reachability here is wrong. */
gcc_assert (single_user_map.get (var));
#endif
gcc_checking_assert (single_user_map.get (var));
if (dump_file)
{
fprintf (dump_file, "Variable %s/%i is used by single function\n",


@ -26,7 +26,7 @@ along with GCC; see the file COPYING3. If not see
/* To provide consistency in naming, all IRA external variables,
functions, common typedefs start with prefix ira_. */
#ifdef ENABLE_CHECKING
#if CHECKING_P
#define ENABLE_IRA_CHECKING
#endif
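ira.h keeps its subsystem-local macro but now derives it from CHECKING_P. A small self-contained sketch of that derivation, with a made-up consumer (pick_hard_reg is not an IRA function):

#include <assert.h>

/* configure defines the real CHECKING_P to 0 or 1; hard-coded here.  */
#define CHECKING_P 1

/* Subsystem-local switch derived from the global one, as ira.h does.  */
#if CHECKING_P
#define ENABLE_IRA_CHECKING
#endif

static int
pick_hard_reg (int pseudo)
{
  int hard_reg = pseudo % 16;

#ifdef ENABLE_IRA_CHECKING
  /* Extra sanity check compiled in only when checking is configured.  */
  assert (hard_reg >= 0 && hard_reg < 16);
#endif

  return hard_reg;
}

int
main (void)
{
  return pick_hard_reg (42) == 10 ? 0 : 1;
}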


@ -5153,9 +5153,9 @@ ira (FILE *f)
df_remove_problem (df_live);
gcc_checking_assert (df_live == NULL);
#ifdef ENABLE_CHECKING
df->changeable_flags |= DF_VERIFY_SCHEDULED;
#endif
if (flag_checking)
df->changeable_flags |= DF_VERIFY_SCHEDULED;
df_analyze ();
init_reg_equiv ();


@ -734,7 +734,5 @@ doloop_optimize_loops (void)
iv_analysis_done ();
#ifdef ENABLE_CHECKING
verify_loop_structure ();
#endif
checking_verify_loop_structure ();
}


@ -104,10 +104,8 @@ loop_optimizer_init (unsigned flags)
/* Ensure that the dominators are computed, like flow_loops_find does. */
calculate_dominance_info (CDI_DOMINATORS);
#ifdef ENABLE_CHECKING
if (!needs_fixup)
verify_loop_structure ();
#endif
checking_verify_loop_structure ();
/* Clear all flags. */
if (recorded_exits)
@ -129,9 +127,7 @@ loop_optimizer_init (unsigned flags)
/* Dump loops. */
flow_loops_dump (dump_file, NULL, 1);
#ifdef ENABLE_CHECKING
verify_loop_structure ();
#endif
checking_verify_loop_structure ();
timevar_pop (TV_LOOP_INIT);
}
@ -323,9 +319,7 @@ fix_loop_structure (bitmap changed_bbs)
/* Apply flags to loops. */
apply_loop_flags (current_loops->state | record_exits);
#ifdef ENABLE_CHECKING
verify_loop_structure ();
#endif
checking_verify_loop_structure ();
timevar_pop (TV_LOOP_INIT);


@ -2137,7 +2137,5 @@ move_loop_invariants (void)
invariant_table = NULL;
invariant_table_size = 0;
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
checking_verify_flow_info ();
}


@ -1591,15 +1591,13 @@ lra_assign (void)
bitmap_initialize (&all_spilled_pseudos, &reg_obstack);
create_live_range_start_chains ();
setup_live_pseudos_and_spill_after_risky_transforms (&all_spilled_pseudos);
#ifdef ENABLE_CHECKING
if (!flag_ipa_ra)
if (flag_checking && !flag_ipa_ra)
for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
if (lra_reg_info[i].nrefs != 0 && reg_renumber[i] >= 0
&& lra_reg_info[i].call_p
&& overlaps_hard_reg_set_p (call_used_reg_set,
PSEUDO_REGNO_MODE (i), reg_renumber[i]))
gcc_unreachable ();
#endif
/* Setup insns to process on the next constraint pass. */
bitmap_initialize (&changed_pseudo_bitmap, &reg_obstack);
init_live_reload_and_inheritance_pseudos ();


@ -4455,8 +4455,7 @@ lra_constraints (bool first_p)
bitmap_clear (&equiv_insn_bitmap);
/* If we used a new hard regno, changed_p should be true because the
hard reg is assigned to a new pseudo. */
#ifdef ENABLE_CHECKING
if (! changed_p)
if (flag_checking && !changed_p)
{
for (i = FIRST_PSEUDO_REGISTER; i < new_regno_start; i++)
if (lra_reg_info[i].nrefs != 0
@ -4468,7 +4467,6 @@ lra_constraints (bool first_p)
lra_assert (df_regs_ever_live_p (hard_regno + j));
}
}
#endif
return changed_p;
}


@ -1436,11 +1436,11 @@ lra_eliminate (bool final_p, bool first_p)
bitmap_initialize (&insns_with_changed_offsets, &reg_obstack);
if (final_p)
{
#ifdef ENABLE_CHECKING
update_reg_eliminate (&insns_with_changed_offsets);
if (! bitmap_empty_p (&insns_with_changed_offsets))
gcc_unreachable ();
#endif
if (flag_checking)
{
update_reg_eliminate (&insns_with_changed_offsets);
gcc_assert (bitmap_empty_p (&insns_with_changed_offsets));
}
/* We change eliminable hard registers in insns so we should do
this for all insns containing any eliminable hard
register. */


@ -91,12 +91,10 @@ struct lra_reg
/* True if the pseudo should not be assigned to a stack register. */
bool no_stack_p;
#endif
#ifdef ENABLE_CHECKING
/* True if the pseudo crosses a call. It is setup in lra-lives.c
and used to check that the pseudo crossing a call did not get a
call used hard register. */
bool call_p;
#endif
/* Number of references and execution frequencies of the register in
*non-debug* insns. */
int nrefs, freq;


@ -590,9 +590,7 @@ check_pseudos_live_through_calls (int regno)
for (hr = 0; hr < FIRST_PSEUDO_REGISTER; hr++)
if (HARD_REGNO_CALL_PART_CLOBBERED (hr, PSEUDO_REGNO_MODE (regno)))
SET_HARD_REG_BIT (lra_reg_info[regno].conflict_hard_regs, hr);
#ifdef ENABLE_CHECKING
lra_reg_info[regno].call_p = true;
#endif
if (! sparseset_bit_p (pseudos_live_through_setjumps, regno))
return;
sparseset_clear_bit (pseudos_live_through_setjumps, regno);
@ -1229,9 +1227,7 @@ lra_create_live_ranges_1 (bool all_p, bool dead_insn_p)
lra_reg_info[i].biggest_mode = GET_MODE (regno_reg_rtx[i]);
else
lra_reg_info[i].biggest_mode = VOIDmode;
#ifdef ENABLE_CHECKING
lra_reg_info[i].call_p = false;
#endif
if (i >= FIRST_PSEUDO_REGISTER
&& lra_reg_info[i].nrefs != 0)
{


@ -578,10 +578,8 @@ create_remat_bb_data (void)
last_basic_block_for_fn (cfun));
FOR_ALL_BB_FN (bb, cfun)
{
#ifdef ENABLE_CHECKING
if (bb->index < 0 || bb->index >= last_basic_block_for_fn (cfun))
abort ();
#endif
gcc_checking_assert (bb->index >= 0
&& bb->index < last_basic_block_for_fn (cfun));
bb_info = get_remat_bb_data (bb);
bb_info->bb = bb;
bitmap_initialize (&bb_info->changed_regs, &reg_obstack);


@ -1199,30 +1199,22 @@ lra_update_insn_recog_data (rtx_insn *insn)
decode_asm_operands (PATTERN (insn), NULL,
data->operand_loc,
constraints, operand_mode, NULL);
#ifdef ENABLE_CHECKING
{
int i;
for (i = 0; i < nop; i++)
if (flag_checking)
for (int i = 0; i < nop; i++)
lra_assert
(insn_static_data->operand[i].mode == operand_mode[i]
&& insn_static_data->operand[i].constraint == constraints[i]
&& ! insn_static_data->operand[i].is_operator);
}
#endif
}
#ifdef ENABLE_CHECKING
{
int i;
for (i = 0; i < insn_static_data->n_operands; i++)
if (flag_checking)
for (int i = 0; i < insn_static_data->n_operands; i++)
lra_assert
(insn_static_data->operand[i].type
== (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
: insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
: OP_IN));
}
#endif
}
else
{
@ -2004,8 +1996,6 @@ restore_scratches (void)
#ifdef ENABLE_CHECKING
/* Function checks RTL for correctness. If FINAL_P is true, it is
done at the end of LRA and the check is more rigorous. */
static void
@ -2024,9 +2014,7 @@ check_rtl (bool final_p)
{
if (final_p)
{
#ifdef ENABLED_CHECKING
extract_constrain_insn (insn);
#endif
continue;
}
/* LRA code is based on assumption that all addresses can be
@ -2039,7 +2027,6 @@ check_rtl (bool final_p)
fatal_insn_not_found (insn);
}
}
#endif /* #ifdef ENABLE_CHECKING */
/* Determine if the current function has an exception receiver block
that reaches the exit block via non-exceptional edges */
@ -2233,10 +2220,9 @@ lra (FILE *f)
init_insn_recog_data ();
#ifdef ENABLE_CHECKING
/* Some quick check on RTL generated by previous passes. */
check_rtl (false);
#endif
if (flag_checking)
check_rtl (false);
lra_in_progress = 1;
@ -2437,9 +2423,8 @@ lra (FILE *f)
by this, so unshare everything here. */
unshare_all_rtl_again (get_insns ());
#ifdef ENABLE_CHECKING
check_rtl (true);
#endif
if (flag_checking)
check_rtl (true);
timevar_pop (TV_LRA);
}


@ -1560,10 +1560,11 @@ input_cgraph_1 (struct lto_file_decl_data *file_data,
lto_input_toplevel_asms (file_data, order_base);
/* AUX pointers should be all non-zero for function nodes read from the stream. */
#ifdef ENABLE_CHECKING
FOR_EACH_VEC_ELT (nodes, i, node)
gcc_assert (node->aux || !is_a <cgraph_node *> (node));
#endif
if (flag_checking)
{
FOR_EACH_VEC_ELT (nodes, i, node)
gcc_assert (node->aux || !is_a <cgraph_node *> (node));
}
FOR_EACH_VEC_ELT (nodes, i, node)
{
int ref;


@ -607,17 +607,12 @@ DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
std::swap (sccstack[first + i],
sccstack[first + entry_start + i]);
if (scc_entry_len == 1)
; /* We already sorted SCC deterministically in hash_scc. */
else
/* Check that we have only one SCC.
Naturally we may have conflicts if hash function is not
strong enough. Lets see how far this gets. */
{
#ifdef ENABLE_CHECKING
gcc_unreachable ();
#endif
}
/* We already sorted SCC deterministically in hash_scc. */
/* Check that we have only one SCC.
Naturally we may have conflicts if hash function is not
strong enough. Lets see how far this gets. */
gcc_checking_assert (scc_entry_len == 1);
}
/* Write LTO_tree_scc. */
@ -2277,12 +2272,13 @@ void
lto_output (void)
{
struct lto_out_decl_state *decl_state;
#ifdef ENABLE_CHECKING
bitmap output = lto_bitmap_alloc ();
#endif
bitmap output = NULL;
int i, n_nodes;
lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
if (flag_checking)
output = lto_bitmap_alloc ();
/* Initialize the streamer. */
lto_streamer_init ();
@ -2296,10 +2292,11 @@ lto_output (void)
if (lto_symtab_encoder_encode_body_p (encoder, node)
&& !node->alias)
{
#ifdef ENABLE_CHECKING
gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
bitmap_set_bit (output, DECL_UID (node->decl));
#endif
if (flag_checking)
{
gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
bitmap_set_bit (output, DECL_UID (node->decl));
}
decl_state = lto_new_out_decl_state ();
lto_push_out_decl_state (decl_state);
if (gimple_has_body_p (node->decl) || !flag_wpa
@ -2326,10 +2323,11 @@ lto_output (void)
&& !node->alias)
{
timevar_push (TV_IPA_LTO_CTORS_OUT);
#ifdef ENABLE_CHECKING
gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
bitmap_set_bit (output, DECL_UID (node->decl));
#endif
if (flag_checking)
{
gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
bitmap_set_bit (output, DECL_UID (node->decl));
}
decl_state = lto_new_out_decl_state ();
lto_push_out_decl_state (decl_state);
if (DECL_INITIAL (node->decl) != error_mark_node
@ -2353,7 +2351,7 @@ lto_output (void)
output_offload_tables ();
#ifdef ENABLE_CHECKING
#if CHECKING_P
lto_bitmap_free (output);
#endif
}


@ -297,13 +297,12 @@ static hash_table<tree_hash_entry> *tree_htab;
void
lto_streamer_init (void)
{
#ifdef ENABLE_CHECKING
/* Check that all the TS_* handled by the reader and writer routines
match exactly the structures defined in treestruct.def. When a
new TS_* astructure is added, the streamer should be updated to
handle it. */
streamer_check_handled_ts_structures ();
#endif
if (flag_checking)
streamer_check_handled_ts_structures ();
#ifdef LTO_STREAMER_DEBUG
tree_htab = new hash_table<tree_hash_entry> (31);
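The comment above describes a one-time start-up self-check that every TS_* structure has streamer support. A stand-alone sketch of that kind of handler-table coverage check, run only when checking is enabled; the enum, table and function names below are stand-ins, not the streamer's:

#include <assert.h>
#include <stddef.h>

static int flag_checking = 1;

/* Stand-in for the set of structure kinds the streamer must handle.  */
enum kind { KIND_A, KIND_B, KIND_C, KIND_LAST };

static void handle_a (void) {}
static void handle_b (void) {}
static void handle_c (void) {}

/* One handler per kind; a newly added kind without support shows up
   as a NULL slot.  */
static void (*handlers[KIND_LAST]) (void) = { handle_a, handle_b, handle_c };

/* Analogous to streamer_check_handled_ts_structures.  */
static void
check_handled_kinds (void)
{
  for (size_t i = 0; i < KIND_LAST; i++)
    assert (handlers[i] != NULL);
}

static void
streamer_init (void)
{
  if (flag_checking)
    check_handled_kinds ();
  /* ... normal initialization ... */
}

int
main (void)
{
  streamer_init ();
  return 0;
}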


@ -1,3 +1,12 @@
2015-10-27 Mikhail Maltsev <maltsevm@gmail.com>
* lto.c (unify_scc): Use flag_checking and remove ENABLE_CHECKING
conditionals.
(lto_fixup_state): Likewise.
(do_whole_program_analysis): Use
symtab_node::checking_verify_symtab_nodes and remove ENABLE_CHECKING
conditionals.
2015-10-13 Jakub Jelinek <jakub@redhat.com>
* lto-lang.c (DEF_FUNCTION_TYPE_9, DEF_FUNCTION_TYPE_10,


@ -1586,19 +1586,18 @@ unify_scc (struct data_in *data_in, unsigned from,
num_sccs_merged++;
total_scc_size_merged += len;
#ifdef ENABLE_CHECKING
for (unsigned i = 0; i < len; ++i)
{
tree t = map[2*i+1];
enum tree_code code = TREE_CODE (t);
/* IDENTIFIER_NODEs should be singletons and are merged by the
streamer. The others should be singletons, too, and we
should not merge them in any way. */
gcc_assert (code != TRANSLATION_UNIT_DECL
&& code != IDENTIFIER_NODE
&& !streamer_handle_as_builtin_p (t));
}
#endif
if (flag_checking)
for (unsigned i = 0; i < len; ++i)
{
tree t = map[2*i+1];
enum tree_code code = TREE_CODE (t);
/* IDENTIFIER_NODEs should be singletons and are merged by the
streamer. The others should be singletons, too, and we
should not merge them in any way. */
gcc_assert (code != TRANSLATION_UNIT_DECL
&& code != IDENTIFIER_NODE
&& !streamer_handle_as_builtin_p (t));
}
/* Fixup the streamer cache with the prevailing nodes according
to the tree node mapping computed by compare_tree_sccs. */
@ -2636,10 +2635,8 @@ lto_fixup_state (struct lto_in_decl_state *state)
for (i = 0; i < vec_safe_length (trees); i++)
{
tree t = (*trees)[i];
#ifdef ENABLE_CHECKING
if (TYPE_P (t))
if (flag_checking && TYPE_P (t))
verify_type (t);
#endif
if (VAR_OR_FUNCTION_DECL_P (t)
&& (TREE_PUBLIC (t) || DECL_EXTERNAL (t)))
(*trees)[i] = lto_symtab_prevailing_decl (t);
@ -3101,9 +3098,8 @@ do_whole_program_analysis (void)
fprintf (symtab->dump_file, "Optimized ");
symtab_node::dump_table (symtab->dump_file);
}
#ifdef ENABLE_CHECKING
symtab_node::verify_symtab_nodes ();
#endif
symtab_node::checking_verify_symtab_nodes ();
bitmap_obstack_release (NULL);
/* We are about to launch the final LTRANS phase, stop the WPA timer. */


@ -3127,14 +3127,14 @@ scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
TYPE_FIELDS (ctx->record_type)
= nreverse (TYPE_FIELDS (ctx->record_type));
#ifdef ENABLE_CHECKING
tree field;
unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
for (field = TYPE_FIELDS (ctx->record_type);
field;
field = DECL_CHAIN (field))
gcc_assert (DECL_ALIGN (field) == align);
#endif
if (flag_checking)
{
unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
for (tree field = TYPE_FIELDS (ctx->record_type);
field;
field = DECL_CHAIN (field))
gcc_assert (DECL_ALIGN (field) == align);
}
layout_type (ctx->record_type);
if (offloaded)
fixup_child_record_type (ctx);
@ -6742,10 +6742,8 @@ expand_omp_taskreg (struct omp_region *region)
}
if (gimple_in_ssa_p (cfun))
update_ssa (TODO_update_ssa);
#ifdef ENABLE_CHECKING
if (!loops_state_satisfies_p (LOOPS_NEED_FIXUP))
if (flag_checking && !loops_state_satisfies_p (LOOPS_NEED_FIXUP))
verify_loop_structure ();
#endif
pop_cfun ();
}
@ -11562,10 +11560,8 @@ expand_omp_target (struct omp_region *region)
if (changed)
cleanup_tree_cfg ();
}
#ifdef ENABLE_CHECKING
if (!loops_state_satisfies_p (LOOPS_NEED_FIXUP))
if (flag_checking && !loops_state_satisfies_p (LOOPS_NEED_FIXUP))
verify_loop_structure ();
#endif
pop_cfun ();
}
@ -12102,10 +12098,8 @@ execute_expand_omp (void)
expand_omp (root_omp_region);
#ifdef ENABLE_CHECKING
if (!loops_state_satisfies_p (LOOPS_NEED_FIXUP))
if (flag_checking && !loops_state_satisfies_p (LOOPS_NEED_FIXUP))
verify_loop_structure ();
#endif
cleanup_tree_cfg ();
free_omp_regions ();
@ -14260,7 +14254,7 @@ lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
default:
break;
case OMP_CLAUSE_MAP:
#ifdef ENABLE_CHECKING
#if CHECKING_P
/* First check what we're prepared to handle in the following. */
switch (OMP_CLAUSE_MAP_KIND (c))
{


@ -1952,9 +1952,8 @@ execute_function_todo (function *fn, void *data)
gcc_assert (dom_info_state (fn, CDI_POST_DOMINATORS) == DOM_NONE);
/* If we've seen errors do not bother running any verifiers. */
if (!seen_error ())
if (flag_checking && !seen_error ())
{
#if defined ENABLE_CHECKING
dom_state pre_verify_state = dom_info_state (fn, CDI_DOMINATORS);
dom_state pre_verify_pstate = dom_info_state (fn, CDI_POST_DOMINATORS);
@ -1988,7 +1987,6 @@ execute_function_todo (function *fn, void *data)
/* Make sure verifiers don't change dominator state. */
gcc_assert (dom_info_state (fn, CDI_DOMINATORS) == pre_verify_state);
gcc_assert (dom_info_state (fn, CDI_POST_DOMINATORS) == pre_verify_pstate);
#endif
}
fn->last_verified = flags & TODO_verify_all;
@ -2008,11 +2006,10 @@ execute_function_todo (function *fn, void *data)
static void
execute_todo (unsigned int flags)
{
#if defined ENABLE_CHECKING
if (cfun
if (flag_checking
&& cfun
&& need_ssa_update_p (cfun))
gcc_assert (flags & TODO_update_ssa_any);
#endif
timevar_push (TV_TODO);
@ -2076,14 +2073,12 @@ clear_last_verified (function *fn, void *data ATTRIBUTE_UNUSED)
/* Helper function. Verify that the properties has been turn into the
properties expected by the pass. */
#ifdef ENABLE_CHECKING
static void
static void DEBUG_FUNCTION
verify_curr_properties (function *fn, void *data)
{
unsigned int props = (size_t)data;
gcc_assert ((fn->curr_properties & props) == props);
}
#endif
/* Initialize pass dump file. */
/* This is non-static so that the plugins can use it. */
@ -2331,10 +2326,9 @@ execute_one_pass (opt_pass *pass)
/* Run pre-pass verification. */
execute_todo (pass->todo_flags_start);
#ifdef ENABLE_CHECKING
do_per_function (verify_curr_properties,
(void *)(size_t)pass->properties_required);
#endif
if (flag_checking)
do_per_function (verify_curr_properties,
(void *)(size_t)pass->properties_required);
/* If a timevar is present, start it. */
if (pass->tv_id != TV_NONE)


@ -2205,8 +2205,6 @@ tree_bb_level_predictions (void)
}
}
#ifdef ENABLE_CHECKING
/* Callback for hash_map::traverse, asserts that the pointer map is
empty. */
@ -2217,7 +2215,6 @@ assert_is_empty (const_basic_block const &, edge_prediction *const &value,
gcc_assert (!value);
return false;
}
#endif
/* Predict branch probabilities and estimate profile for basic block BB. */
@ -2352,9 +2349,9 @@ tree_estimate_probability (void)
FOR_EACH_BB_FN (bb, cfun)
combine_predictions_for_bb (bb);
#ifdef ENABLE_CHECKING
bb_predictions->traverse<void *, assert_is_empty> (NULL);
#endif
if (flag_checking)
bb_predictions->traverse<void *, assert_is_empty> (NULL);
delete bb_predictions;
bb_predictions = NULL;
@ -2545,11 +2542,10 @@ propagate_freq (basic_block head, bitmap tovisit)
/* Compute frequency of basic block. */
if (bb != head)
{
#ifdef ENABLE_CHECKING
FOR_EACH_EDGE (e, ei, bb->preds)
gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
|| (e->flags & EDGE_DFS_BACK));
#endif
if (flag_checking)
FOR_EACH_EDGE (e, ei, bb->preds)
gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
|| (e->flags & EDGE_DFS_BACK));
FOR_EACH_EDGE (e, ei, bb->preds)
if (EDGE_INFO (e)->back_edge)


@ -625,10 +625,9 @@ pp_format (pretty_printer *pp, text_info *text)
*formatters[argno] = XOBFINISH (&buffer->chunk_obstack, const char *);
}
#ifdef ENABLE_CHECKING
for (; argno < PP_NL_ARGMAX; argno++)
gcc_assert (!formatters[argno]);
#endif
if (CHECKING_P)
for (; argno < PP_NL_ARGMAX; argno++)
gcc_assert (!formatters[argno]);
/* Revert to normal obstack and wrapping mode. */
buffer->obstack = &buffer->formatted_obstack;
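pp_format now uses a plain if (CHECKING_P) instead of an #ifdef: CHECKING_P is always defined to 0 or 1, so a release build folds the loop away as dead code while the code still gets parsed and type-checked in every configuration. A tiny sketch of that property, with CHECKING_P hard-coded for the example:

#include <assert.h>

/* configure defines the real macro; always 0 or 1.  */
#define CHECKING_P 1

#define NARGS 8

int
main (void)
{
  const char *formatters[NARGS] = { "a", "b" };
  int argno = 2;

  /* With CHECKING_P == 0 the compiler removes this loop as dead code,
     but it cannot bit-rot the way code inside #ifdef can.  */
  if (CHECKING_P)
    for (; argno < NARGS; argno++)
      assert (!formatters[argno]);

  return 0;
}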


@ -1808,15 +1808,13 @@ real_to_decimal_for_mode (char *str, const REAL_VALUE_TYPE *r_orig,
/* Append the exponent. */
sprintf (last, "e%+d", dec_exp);
#ifdef ENABLE_CHECKING
/* Verify that we can read the original value back in. */
if (mode != VOIDmode)
if (flag_checking && mode != VOIDmode)
{
real_from_string (&r, str);
real_convert (&r, mode, &r);
gcc_assert (real_identical (&r, r_orig));
}
#endif
}
/* Likewise, except always uses round-to-nearest. */


@ -2975,9 +2975,7 @@ split_all_insns (void)
if (changed)
find_many_sub_basic_blocks (blocks);
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
checking_verify_flow_info ();
sbitmap_free (blocks);
}


@ -100,9 +100,7 @@ static bool replace_oldest_value_addr (rtx *, enum reg_class,
static bool replace_oldest_value_mem (rtx, rtx_insn *, struct value_data *);
static bool copyprop_hardreg_forward_1 (basic_block, struct value_data *);
extern void debug_value_data (struct value_data *);
#ifdef ENABLE_CHECKING
static void validate_value_data (struct value_data *);
#endif
/* Free all queued updates for DEBUG_INSNs that change some reg to
register REGNO. */
@ -150,9 +148,8 @@ kill_value_one_regno (unsigned int regno, struct value_data *vd)
if (vd->e[regno].debug_insn_changes)
free_debug_insn_changes (vd, regno);
#ifdef ENABLE_CHECKING
validate_value_data (vd);
#endif
if (flag_checking)
validate_value_data (vd);
}
/* Kill the value in register REGNO for NREGS, and any other registers
@ -365,9 +362,8 @@ copy_value (rtx dest, rtx src, struct value_data *vd)
continue;
vd->e[i].next_regno = dr;
#ifdef ENABLE_CHECKING
validate_value_data (vd);
#endif
if (flag_checking)
validate_value_data (vd);
}
/* Return true if a mode change from ORIG to NEW is allowed for REGNO. */
@ -1141,7 +1137,6 @@ copyprop_hardreg_forward_bb_without_debug_insn (basic_block bb)
skip_debug_insn_p = false;
}
#ifdef ENABLE_CHECKING
static void
validate_value_data (struct value_data *vd)
{
@ -1187,7 +1182,7 @@ validate_value_data (struct value_data *vd)
i, GET_MODE_NAME (vd->e[i].mode), vd->e[i].oldest_regno,
vd->e[i].next_regno);
}
#endif
namespace {


@ -85,7 +85,7 @@ a register with any other reload. */
#define REG_OK_STRICT
/* We do not enable this with ENABLE_CHECKING, since it is awfully slow. */
/* We do not enable this with CHECKING_P, since it is awfully slow. */
#undef DEBUG_RELOAD
#include "config.h"


@ -47,12 +47,6 @@ along with GCC; see the file COPYING3. If not see
#ifdef INSN_SCHEDULING
#ifdef ENABLE_CHECKING
#define CHECK (true)
#else
#define CHECK (false)
#endif
/* Holds current parameters for the dependency analyzer. */
struct sched_deps_info_def *sched_deps_info;
@ -505,9 +499,8 @@ static enum DEPS_ADJUST_RESULT maybe_add_or_update_dep_1 (dep_t, bool,
rtx, rtx);
static enum DEPS_ADJUST_RESULT add_or_update_dep_1 (dep_t, bool, rtx, rtx);
#ifdef ENABLE_CHECKING
static void check_dep (dep_t, bool);
#endif
/* Return nonzero if a load of the memory reference MEM can cause a trap. */
@ -1228,9 +1221,8 @@ add_or_update_dep_1 (dep_t new_dep, bool resolved_p,
gcc_assert (INSN_P (DEP_PRO (new_dep)) && INSN_P (DEP_CON (new_dep))
&& DEP_PRO (new_dep) != DEP_CON (new_dep));
#ifdef ENABLE_CHECKING
check_dep (new_dep, mem1 != NULL);
#endif
if (flag_checking)
check_dep (new_dep, mem1 != NULL);
if (true_dependency_cache != NULL)
{
@ -1348,9 +1340,8 @@ sd_add_dep (dep_t dep, bool resolved_p)
add_to_deps_list (DEP_NODE_BACK (n), con_back_deps);
#ifdef ENABLE_CHECKING
check_dep (dep, false);
#endif
if (flag_checking)
check_dep (dep, false);
add_to_deps_list (DEP_NODE_FORW (n), pro_forw_deps);
@ -4515,7 +4506,6 @@ debug_ds (ds_t s)
fprintf (stderr, "\n");
}
#ifdef ENABLE_CHECKING
/* Verify that dependence type and status are consistent.
If RELAXED_P is true, then skip dep_weakness checks. */
static void
@ -4600,7 +4590,6 @@ check_dep (dep_t dep, bool relaxed_p)
gcc_assert (ds & BEGIN_CONTROL);
}
}
#endif /* ENABLE_CHECKING */
/* The following code discovers opportunities to switch a memory reference
and an increment by modifying the address. We ensure that this is done


@ -954,7 +954,6 @@ return_regset_to_pool (regset rs)
regset_pool.v[regset_pool.n++] = rs;
}
#ifdef ENABLE_CHECKING
/* This is used as a qsort callback for sorting regset pool stacks.
X and XX are addresses of two regsets. They are never equal. */
static int
@ -968,44 +967,42 @@ cmp_v_in_regset_pool (const void *x, const void *xx)
return -1;
gcc_unreachable ();
}
#endif
/* Free the regset pool possibly checking for memory leaks. */
void
free_regset_pool (void)
{
if (flag_checking)
{
regset *v = regset_pool.v;
int i = 0;
int n = regset_pool.n;
regset *vv = regset_pool.vv;
int ii = 0;
int nn = regset_pool.nn;
int diff = 0;
gcc_assert (n <= nn);
/* Sort both vectors so it will be possible to compare them. */
qsort (v, n, sizeof (*v), cmp_v_in_regset_pool);
qsort (vv, nn, sizeof (*vv), cmp_v_in_regset_pool);
while (ii < nn)
{
if (v[i] == vv[ii])
i++;
else
/* VV[II] was lost. */
diff++;
ii++;
}
gcc_assert (diff == regset_pool.diff);
}
/* If not true - we have a memory leak. */
gcc_assert (regset_pool.diff == 0);
@ -3623,7 +3620,6 @@ insn_is_the_only_one_in_bb_p (insn_t insn)
return sel_bb_head_p (insn) && sel_bb_end_p (insn);
}
#ifdef ENABLE_CHECKING
/* Check that the region we're scheduling still has at most one
backedge. */
static void
@ -3644,7 +3640,6 @@ verify_backedges (void)
gcc_assert (n <= 1);
}
}
#endif
/* Functions to work with control flow. */
@ -3889,10 +3884,12 @@ tidy_control_flow (basic_block xbb, bool full_tidying)
sel_recompute_toporder ();
}
#ifdef ENABLE_CHECKING
verify_backedges ();
verify_dominators (CDI_DOMINATORS);
#endif
/* TODO: use separate flag for CFG checking. */
if (flag_checking)
{
verify_backedges ();
verify_dominators (CDI_DOMINATORS);
}
return changed;
}


@ -378,10 +378,8 @@ struct moveop_static_params
they are to be removed. */
int uid;
#ifdef ENABLE_CHECKING
/* This is initialized to the insn on which the driver stopped its traversal. */
insn_t failed_insn;
#endif
/* True if we scheduled an insn with different register. */
bool was_renamed;
@ -1655,9 +1653,8 @@ find_best_reg_for_expr (expr_t expr, blist_t bnds, bool *is_orig_reg_p)
collect_unavailable_regs_from_bnds (expr, bnds, used_regs, &reg_rename_data,
&original_insns);
#ifdef ENABLE_CHECKING
/* If after reload, make sure we're working with hard regs here. */
if (reload_completed)
if (flag_checking && reload_completed)
{
reg_set_iterator rsi;
unsigned i;
@ -1665,7 +1662,6 @@ find_best_reg_for_expr (expr_t expr, blist_t bnds, bool *is_orig_reg_p)
EXECUTE_IF_SET_IN_REG_SET (used_regs, FIRST_PSEUDO_REGISTER, i, rsi)
gcc_unreachable ();
}
#endif
if (EXPR_SEPARABLE_P (expr))
{
@ -3593,7 +3589,6 @@ vinsn_vec_has_expr_p (vinsn_vec_t vinsn_vec, expr_t expr)
return false;
}
#ifdef ENABLE_CHECKING
/* Return true if either of expressions from ORIG_OPS can be blocked
by previously created bookkeeping code. STATIC_PARAMS points to static
parameters of move_op. */
@ -3635,7 +3630,6 @@ av_set_could_be_blocked_by_bookkeeping_p (av_set_t orig_ops, void *static_params
return false;
}
#endif
/* Clear VINSN_VEC and detach vinsns. */
static void
@ -4889,11 +4883,10 @@ move_cond_jump (rtx_insn *insn, bnd_t bnd)
block_bnd = BLOCK_FOR_INSN (BND_TO (bnd));
prev = BND_TO (bnd);
#ifdef ENABLE_CHECKING
/* Moving of jump should not cross any other jumps or beginnings of new
basic blocks. The only exception is when we move a jump through
mutually exclusive insns along fallthru edges. */
if (block_from != block_bnd)
if (flag_checking && block_from != block_bnd)
{
bb = block_from;
for (link = PREV_INSN (insn); link != PREV_INSN (prev);
@ -4908,7 +4901,6 @@ move_cond_jump (rtx_insn *insn, bnd_t bnd)
}
}
}
#endif
/* Jump is moved to the boundary. */
next = PREV_INSN (insn);
@ -6205,9 +6197,7 @@ move_op_orig_expr_not_found (insn_t insn, av_set_t orig_ops ATTRIBUTE_UNUSED,
{
moveop_static_params_p sparams = (moveop_static_params_p) static_params;
#ifdef ENABLE_CHECKING
sparams->failed_insn = insn;
#endif
/* If we're scheduling separate expr, in order to generate correct code
we need to stop the search at bookkeeping code generated with the
@ -6380,7 +6370,6 @@ code_motion_process_successors (insn_t insn, av_set_t orig_ops,
}
}
#ifdef ENABLE_CHECKING
/* Here, RES==1 if original expr was found at least for one of the
successors. After the loop, RES may happen to have zero value
only if at some point the expr searched is present in av_set, but is
@ -6388,12 +6377,10 @@ code_motion_process_successors (insn_t insn, av_set_t orig_ops,
The exception is when the original operation is blocked by
bookkeeping generated for another fence or for another path in current
move_op. */
gcc_assert (res == 1
|| (res == 0
&& av_set_could_be_blocked_by_bookkeeping_p (orig_ops,
static_params))
|| res == -1);
#endif
gcc_checking_assert (res == 1
|| (res == 0
&& av_set_could_be_blocked_by_bookkeeping_p (orig_ops, static_params))
|| res == -1);
/* Merge data, clean up, etc. */
if (res != -1 && code_motion_path_driver_info->after_merge_succs)
@ -6695,9 +6682,7 @@ move_op (insn_t insn, av_set_t orig_ops, expr_t expr_vliw,
sparams.dest = dest;
sparams.c_expr = c_expr;
sparams.uid = INSN_UID (EXPR_INSN_RTX (expr_vliw));
#ifdef ENABLE_CHECKING
sparams.failed_insn = NULL;
#endif
sparams.was_renamed = false;
lparams.e1 = NULL;


@ -344,9 +344,7 @@ first_readonly_imm_use (imm_use_iterator *imm, tree var)
{
imm->end_p = &(SSA_NAME_IMM_USE_NODE (var));
imm->imm_use = imm->end_p->next;
#ifdef ENABLE_CHECKING
imm->iter_node.next = imm->imm_use->next;
#endif
if (end_readonly_imm_use_p (imm))
return NULL_USE_OPERAND_P;
return imm->imm_use;
@ -358,14 +356,15 @@ next_readonly_imm_use (imm_use_iterator *imm)
{
use_operand_p old = imm->imm_use;
#ifdef ENABLE_CHECKING
/* If this assertion fails, it indicates the 'next' pointer has changed
since the last bump. This indicates that the list is being modified
via stmt changes, or SET_USE, or somesuch thing, and you need to be
using the SAFE version of the iterator. */
gcc_assert (imm->iter_node.next == old->next);
imm->iter_node.next = old->next->next;
#endif
if (flag_checking)
{
gcc_assert (imm->iter_node.next == old->next);
imm->iter_node.next = old->next->next;
}
imm->imm_use = old->next;
if (end_readonly_imm_use_p (imm))
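The comment in next_readonly_imm_use explains the trick behind this flag_checking block: the iterator snapshots the element's next pointer, and a later mismatch means the list was modified while a read-only iterator was walking it. A stand-alone sketch of that snapshot check on a toy singly-linked list (stand-in types, not the SSA immediate-use lists):

#include <assert.h>
#include <stddef.h>

static int flag_checking = 1;

struct node { int value; struct node *next; };

struct ro_iter
{
  struct node *cur;
  struct node *expected_next;   /* snapshot taken at the previous step */
};

static void
iter_start (struct ro_iter *it, struct node *head)
{
  it->cur = head;
  it->expected_next = head ? head->next : NULL;
}

static struct node *
iter_next (struct ro_iter *it)
{
  struct node *old = it->cur;

  if (flag_checking)
    {
      /* If this fires, the list changed behind the back of a read-only
         iterator; a "safe" iterator should have been used instead.  */
      assert (it->expected_next == old->next);
      it->expected_next = old->next ? old->next->next : NULL;
    }

  it->cur = old->next;
  return it->cur;
}

int
main (void)
{
  struct node c = { 3, NULL }, b = { 2, &c }, a = { 1, &b };
  struct ro_iter it;

  iter_start (&it, &a);
  while (it.cur)
    iter_next (&it);
  return 0;
}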


@ -644,9 +644,6 @@ compute_store_table (void)
{
int ret;
basic_block bb;
#ifdef ENABLE_CHECKING
unsigned regno;
#endif
rtx_insn *insn;
rtx_insn *tmp;
df_ref def;
@ -692,11 +689,12 @@ compute_store_table (void)
last_set_in[DF_REF_REGNO (def)] = 0;
}
#ifdef ENABLE_CHECKING
/* last_set_in should now be all-zero. */
for (regno = 0; regno < max_gcse_regno; regno++)
gcc_assert (!last_set_in[regno]);
#endif
if (flag_checking)
{
/* last_set_in should now be all-zero. */
for (unsigned regno = 0; regno < max_gcse_regno; regno++)
gcc_assert (!last_set_in[regno]);
}
/* Clear temporary marks. */
for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))


@ -39,14 +39,12 @@ public:
function_summary (symbol_table *symtab, bool ggc = false): m_ggc (ggc),
m_map (13, ggc), m_insertion_enabled (true), m_symtab (symtab)
{
#ifdef ENABLE_CHECKING
cgraph_node *node;
FOR_EACH_FUNCTION (node)
{
gcc_checking_assert (node->summary_uid > 0);
}
#endif
if (flag_checking)
{
cgraph_node *node;
FOR_EACH_FUNCTION (node)
gcc_assert (node->summary_uid > 0);
}
m_symtab_insertion_hook =
symtab->add_cgraph_insertion_hook


@ -52,21 +52,21 @@
#include "tm.h"
#include "hard-reg-set.h"
#ifdef ENABLE_CHECKING
#if CHECKING_P
struct cumulative_args_t { void *magic; void *p; };
#else /* !ENABLE_CHECKING */
#else /* !CHECKING_P */
/* When using a GCC build compiler, we could use
__attribute__((transparent_union)) to get cumulative_args_t function
arguments passed like scalars where the ABI would mandate a less
efficient way of argument passing otherwise. However, that would come
at the cost of less type-safe !ENABLE_CHECKING compilation. */
at the cost of less type-safe !CHECKING_P compilation. */
union cumulative_args_t { void *p; };
#endif /* !ENABLE_CHECKING */
#endif /* !CHECKING_P */
/* Types used by the record_gcc_switches() target function. */
enum print_switch_type
@ -200,9 +200,9 @@ extern struct gcc_target targetm;
static inline CUMULATIVE_ARGS *
get_cumulative_args (cumulative_args_t arg)
{
#ifdef ENABLE_CHECKING
#if CHECKING_P
gcc_assert (arg.magic == CUMULATIVE_ARGS_MAGIC);
#endif /* ENABLE_CHECKING */
#endif /* CHECKING_P */
return (CUMULATIVE_ARGS *) arg.p;
}
@ -211,9 +211,9 @@ pack_cumulative_args (CUMULATIVE_ARGS *arg)
{
cumulative_args_t ret;
#ifdef ENABLE_CHECKING
#if CHECKING_P
ret.magic = CUMULATIVE_ARGS_MAGIC;
#endif /* ENABLE_CHECKING */
#endif /* CHECKING_P */
ret.p = (void *) arg;
return ret;
}
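The target.h hunks above keep the two-pointer struct carrying a magic cookie for CHECKING_P builds and the bare one-pointer union otherwise. A self-contained sketch of that pack/unpack-with-magic pattern, with stand-in names (handle_t, HANDLE_MAGIC, struct payload) in place of cumulative_args_t:

#include <assert.h>

#define CHECKING_P 1

static int handle_magic_anchor;
#define HANDLE_MAGIC ((void *) &handle_magic_anchor)

struct payload { int n; };

#if CHECKING_P
/* Checking build: carry a magic cookie so a bogus handle is caught.  */
typedef struct { void *magic; void *p; } handle_t;
#else
/* Release build: just the payload pointer, passed as cheaply as possible.  */
typedef union { void *p; } handle_t;
#endif

static handle_t
pack_handle (struct payload *arg)
{
  handle_t ret;
#if CHECKING_P
  ret.magic = HANDLE_MAGIC;
#endif
  ret.p = (void *) arg;
  return ret;
}

static struct payload *
get_handle (handle_t h)
{
#if CHECKING_P
  assert (h.magic == HANDLE_MAGIC);
#endif
  return (struct payload *) h.p;
}

int
main (void)
{
  struct payload x = { 7 };
  handle_t h = pack_handle (&x);
  return get_handle (h)->n == 7 ? 0 : 1;
}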


@ -22,6 +22,7 @@ along with GCC; see the file COPYING3. If not see
#include "system.h"
#include "coretypes.h"
#include "timevar.h"
#include "options.h"
#ifndef HAVE_CLOCK_T
typedef int clock_t;
@ -727,10 +728,13 @@ timer::print (FILE *fp)
#endif
fprintf (fp, "%8u kB\n", (unsigned) (total->ggc_mem >> 10));
#ifdef ENABLE_CHECKING
fprintf (fp, "Extra diagnostic checks enabled; compiler may run slowly.\n");
fprintf (fp, "Configure with --enable-checking=release to disable checks.\n");
#endif
if (flag_checking)
{
fprintf (fp, "Extra diagnostic checks enabled; "
"compiler may run slowly.\n");
fprintf (fp, "Configure with --enable-checking=release "
"to disable checks.\n");
}
#ifndef ENABLE_ASSERT_CHECKING
fprintf (fp, "Internal checks disabled; compiler is not suited for release.\n");
fprintf (fp, "Configure with --enable-checking=release to enable checks.\n");


@ -5341,9 +5341,7 @@ ipa_tm_execute (void)
enum availability a;
unsigned int i;
#ifdef ENABLE_CHECKING
cgraph_node::verify_cgraph_nodes ();
#endif
cgraph_node::checking_verify_cgraph_nodes ();
bitmap_obstack_initialize (&tm_obstack);
initialize_original_copy_tables ();
@ -5589,9 +5587,7 @@ ipa_tm_execute (void)
FOR_EACH_FUNCTION (node)
node->aux = NULL;
#ifdef ENABLE_CHECKING
cgraph_node::verify_cgraph_nodes ();
#endif
cgraph_node::checking_verify_cgraph_nodes ();
return 0;
}


@ -6469,14 +6469,12 @@ move_stmt_op (tree *tp, int *walk_subtrees, void *data)
|| (p->orig_block == NULL_TREE
&& block != NULL_TREE))
TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
else if (block != NULL_TREE)
else if (flag_checking && block != NULL_TREE)
{
while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
block = BLOCK_SUPERCONTEXT (block);
gcc_assert (block == p->orig_block);
}
#endif
}
else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
{
@ -7061,9 +7059,9 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
bbs.create (0);
bbs.safe_push (entry_bb);
gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
#ifdef ENABLE_CHECKING
verify_sese (entry_bb, exit_bb, &bbs);
#endif
if (flag_checking)
verify_sese (entry_bb, exit_bb, &bbs);
/* The blocks that used to be dominated by something in BBS will now be
dominated by the new block. */
@ -7905,13 +7903,11 @@ gimple_flow_call_edges_add (sbitmap blocks)
no edge to the exit block in CFG already.
Calling make_edge in such case would cause us to
mark that edge as fake and remove it later. */
#ifdef ENABLE_CHECKING
if (stmt == last_stmt)
if (flag_checking && stmt == last_stmt)
{
e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
gcc_assert (e == NULL);
}
#endif
/* Note that the following may create a new basic block
and renumber the existing basic blocks. */


@ -729,9 +729,7 @@ cleanup_tree_cfg_noloop (void)
}
else
{
#ifdef ENABLE_CHECKING
verify_dominators (CDI_DOMINATORS);
#endif
checking_verify_dominators (CDI_DOMINATORS);
changed = false;
}
@ -740,9 +738,7 @@ cleanup_tree_cfg_noloop (void)
gcc_assert (dom_info_available_p (CDI_DOMINATORS));
compact_blocks ();
#ifdef ENABLE_CHECKING
verify_flow_info ();
#endif
checking_verify_flow_info ();
timevar_pop (TV_TREE_CLEANUP_CFG);
@ -777,9 +773,7 @@ repair_loop_structures (void)
BITMAP_FREE (changed_bbs);
#ifdef ENABLE_CHECKING
verify_loop_structure ();
#endif
checking_verify_loop_structure ();
scev_reset ();
timevar_pop (TV_REPAIR_LOOPS);


@ -703,7 +703,7 @@ maybe_record_in_goto_queue (struct leh_state *state, gimple *stmt)
}
#ifdef ENABLE_CHECKING
#if CHECKING_P
/* We do not process GIMPLE_SWITCHes for now. As long as the original source
was in fact structured, and we've not yet done jump threading, then none
of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
@ -3921,9 +3921,8 @@ remove_unreachable_handlers (void)
sbitmap_free (r_reachable);
sbitmap_free (lp_reachable);
#ifdef ENABLE_CHECKING
verify_eh_tree (cfun);
#endif
if (flag_checking)
verify_eh_tree (cfun);
}
/* Remove unreachable handlers if any landing pads have been removed after


@ -2787,13 +2787,12 @@ pass_if_conversion::execute (function *fun)
&& !loop->dont_vectorize))
todo |= tree_if_conversion (loop);
#ifdef ENABLE_CHECKING
{
basic_block bb;
FOR_EACH_BB_FN (bb, fun)
gcc_assert (!bb->aux);
}
#endif
if (flag_checking)
{
basic_block bb;
FOR_EACH_BB_FN (bb, fun)
gcc_assert (!bb->aux);
}
return todo;
}


@ -4481,10 +4481,8 @@ expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
fn = cg_edge->callee->decl;
cg_edge->callee->get_untransformed_body ();
#ifdef ENABLE_CHECKING
if (cg_edge->callee->decl != id->dst_node->decl)
if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
cg_edge->callee->verify ();
#endif
/* We will be inlining this callee. */
id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
@ -4973,7 +4971,7 @@ optimize_inline_calls (tree fn)
pop_gimplify_context (NULL);
#ifdef ENABLE_CHECKING
if (flag_checking)
{
struct cgraph_edge *e;
@ -4983,7 +4981,6 @@ optimize_inline_calls (tree fn)
for (e = id.dst_node->callees; e; e = e->next_callee)
gcc_assert (e->inline_failed);
}
#endif
/* Fold queued statements. */
fold_marked_statements (last, id.statements_to_fold);
@ -4999,9 +4996,8 @@ optimize_inline_calls (tree fn)
number_blocks (fn);
delete_unreachable_blocks_update_callgraph (&id);
#ifdef ENABLE_CHECKING
id.dst_node->verify ();
#endif
if (flag_checking)
id.dst_node->verify ();
/* It would be nice to check SSA/CFG/statement consistency here, but it is
not possible yet - the IPA passes might make various functions to not


@ -3169,45 +3169,46 @@ update_ssa (unsigned update_flags)
if (!need_ssa_update_p (cfun))
return;
if (flag_checking)
{
timevar_push (TV_TREE_STMT_VERIFY);
bool err = false;
FOR_EACH_BB_FN (bb, cfun)
{
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple *stmt = gsi_stmt (gsi);
ssa_op_iter i;
use_operand_p use_p;
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, i, SSA_OP_ALL_USES)
{
tree use = USE_FROM_PTR (use_p);
if (TREE_CODE (use) != SSA_NAME)
continue;
if (SSA_NAME_IN_FREE_LIST (use))
{
error ("statement uses released SSA name:");
debug_gimple_stmt (stmt);
fprintf (stderr, "The use of ");
print_generic_expr (stderr, use, 0);
fprintf (stderr," should have been replaced\n");
err = true;
}
}
}
}
if (err)
internal_error ("cannot update SSA form");
timevar_pop (TV_TREE_STMT_VERIFY);
}
timevar_push (TV_TREE_SSA_INCREMENTAL);
if (dump_file && (dump_flags & TDF_DETAILS))
@ -3271,29 +3272,28 @@ update_ssa (unsigned update_flags)
placement heuristics. */
prepare_block_for_update (start_bb, insert_phi_p);
if (flag_checking)
for (i = 1; i < num_ssa_names; ++i)
{
tree name = ssa_name (i);
if (!name
|| virtual_operand_p (name))
continue;
/* For all but virtual operands, which do not have SSA names
with overlapping life ranges, ensure that symbols marked
for renaming do not have existing SSA names associated with
them as we do not re-write them out-of-SSA before going
into SSA for the remaining symbol uses. */
if (marked_for_renaming (SSA_NAME_VAR (name)))
{
fprintf (stderr, "Existing SSA name for symbol marked for "
"renaming: ");
print_generic_expr (stderr, name, TDF_SLIM);
fprintf (stderr, "\n");
internal_error ("SSA corruption");
}
}
}
else
{


@ -1821,9 +1821,7 @@ out:
rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}
#ifdef ENABLE_CHECKING
verify_loop_structure ();
#endif
checking_verify_loop_structure ();
return 0;
}


@ -841,24 +841,23 @@ eliminate_useless_phis (void)
result = gimple_phi_result (phi);
if (virtual_operand_p (result))
{
#ifdef ENABLE_CHECKING
size_t i;
/* There should be no arguments which are not virtual, or the
results will be incorrect. */
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
tree arg = PHI_ARG_DEF (phi, i);
if (TREE_CODE (arg) == SSA_NAME
&& !virtual_operand_p (arg))
{
fprintf (stderr, "Argument of PHI is not virtual (");
print_generic_expr (stderr, arg, TDF_SLIM);
fprintf (stderr, "), but the result is :");
print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
internal_error ("SSA corruption");
}
}
#endif
if (flag_checking)
for (size_t i = 0; i < gimple_phi_num_args (phi); i++)
{
tree arg = PHI_ARG_DEF (phi, i);
if (TREE_CODE (arg) == SSA_NAME
&& !virtual_operand_p (arg))
{
fprintf (stderr, "Argument of PHI is not virtual (");
print_generic_expr (stderr, arg, TDF_SLIM);
fprintf (stderr, "), but the result is :");
print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
internal_error ("SSA corruption");
}
}
remove_phi_node (&gsi, true);
}
else
@ -884,9 +883,11 @@ eliminate_useless_phis (void)
variable. */
static void
rewrite_trees (var_map map ATTRIBUTE_UNUSED)
rewrite_trees (var_map map)
{
#ifdef ENABLE_CHECKING
if (!flag_checking)
return;
basic_block bb;
/* Search for PHIs where the destination has no partition, but one
or more arguments has a partition. This should not happen and can
@ -918,7 +919,6 @@ rewrite_trees (var_map map ATTRIBUTE_UNUSED)
}
}
}
#endif
}
/* Given the out-of-ssa info object SA (with prepared partitions)


@ -2784,9 +2784,7 @@ pass_parallelize_loops::execute (function *fun)
{
fun->curr_properties &= ~(PROP_gimple_eomp);
#ifdef ENABLE_CHECKING
verify_loop_structure ();
#endif
checking_verify_loop_structure ();
return TODO_update_ssa;
}


@ -896,13 +896,9 @@ suitable_component_p (struct loop *loop, struct component *comp)
if (!determine_offset (first->ref, a->ref, &a->offset))
return false;
#ifdef ENABLE_CHECKING
{
enum ref_step_type a_step;
ok = suitable_reference_p (a->ref, &a_step);
gcc_assert (ok && a_step == comp->comp_step);
}
#endif
enum ref_step_type a_step;
gcc_checking_assert (suitable_reference_p (a->ref, &a_step)
&& a_step == comp->comp_step);
}
/* If there is a write inside the component, we must know whether the


@ -461,9 +461,8 @@ gimple_gen_const_delta_profiler (histogram_value value ATTRIBUTE_UNUSED,
unsigned base ATTRIBUTE_UNUSED)
{
/* FIXME implement this. */
#ifdef ENABLE_CHECKING
internal_error ("unimplemented functionality");
#endif
if (flag_checking)
internal_error ("unimplemented functionality");
gcc_unreachable ();
}


@ -1442,12 +1442,7 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
ao_ref_alias_set (ref2), -1,
tbaa_p);
/* We really do not want to end up here, but returning true is safe. */
#ifdef ENABLE_CHECKING
gcc_unreachable ();
#else
return true;
#endif
}
static bool


@ -52,9 +52,7 @@ along with GCC; see the file COPYING3. If not see
#include "ipa-utils.h"
#include "cfgloop.h"
#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
#endif
static void verify_live_on_entry (tree_live_info_p);
/* VARMAP maintains a mapping from SSA version number to real variables.
@ -1153,9 +1151,8 @@ calculate_live_ranges (var_map map, bool want_livein)
live_worklist (live);
#ifdef ENABLE_CHECKING
verify_live_on_entry (live);
#endif
if (flag_checking)
verify_live_on_entry (live);
calculate_live_on_exit (live);
@ -1292,7 +1289,6 @@ debug (tree_live_info_d *ptr)
}
#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME. */
void
@ -1422,4 +1418,3 @@ verify_live_on_entry (tree_live_info_p live)
}
gcc_assert (num <= 0);
}
#endif


@ -80,9 +80,7 @@ extern void remove_unused_locals (void);
extern void dump_var_map (FILE *, var_map);
extern void debug (_var_map &ref);
extern void debug (_var_map *ptr);
#ifdef ENABLE_CHECKING
extern void register_ssa_partition_check (tree ssa_var);
#endif
/* Return number of partitions in MAP. */
@ -181,12 +179,10 @@ num_basevars (var_map map)
partitions may be filtered out by a view later. */
static inline void
register_ssa_partition (var_map map ATTRIBUTE_UNUSED,
tree ssa_var ATTRIBUTE_UNUSED)
register_ssa_partition (var_map map ATTRIBUTE_UNUSED, tree ssa_var)
{
#if defined ENABLE_CHECKING
register_ssa_partition_check (ssa_var);
#endif
if (flag_checking)
register_ssa_partition_check (ssa_var);
}


@ -1376,10 +1376,8 @@ tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
/* Clean up the information about numbers of iterations, since
complete unrolling might have invalidated it. */
scev_reset ();
#ifdef ENABLE_CHECKING
if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
if (flag_checking && loops_state_satisfies_p (LOOP_CLOSED_SSA))
verify_loop_closed_ssa (true);
#endif
}
if (loop_closed_ssa_invalidated)
BITMAP_FREE (loop_closed_ssa_invalidated);


@ -278,22 +278,22 @@ add_exit_phi (basic_block exit, tree var)
edge e;
edge_iterator ei;
if (flag_checking)
{
/* Check that at least one of the edges entering the EXIT block exits
the loop, or a superloop of that loop, that VAR is defined in. */
gimple *def_stmt = SSA_NAME_DEF_STMT (var);
basic_block def_bb = gimple_bb (def_stmt);
FOR_EACH_EDGE (e, ei, exit->preds)
{
struct loop *aloop = find_common_loop (def_bb->loop_father,
e->src->loop_father);
if (!flow_bb_inside_loop_p (aloop, e->dest))
break;
}
gcc_assert (e);
}
phi = create_phi_node (NULL_TREE, exit);
create_new_def_for (var, phi, gimple_phi_result_ptr (phi));
FOR_EACH_EDGE (e, ei, exit->preds)
@ -1368,11 +1368,9 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
gimple_cond_set_rhs (exit_if, exit_bound);
update_stmt (exit_if);
#ifdef ENABLE_CHECKING
verify_flow_info ();
verify_loop_structure ();
verify_loop_closed_ssa (true);
#endif
checking_verify_flow_info ();
checking_verify_loop_structure ();
checking_verify_loop_closed_ssa (true);
}
/* Wrapper over tree_transform_and_unroll_loop for case we do not


@ -27,6 +27,14 @@ extern void create_iv (tree, tree, tree, struct loop *, gimple_stmt_iterator *,
extern void rewrite_into_loop_closed_ssa (bitmap, unsigned);
extern void rewrite_virtuals_into_loop_closed_ssa (struct loop *);
extern void verify_loop_closed_ssa (bool);
static inline void
checking_verify_loop_closed_ssa (bool verify_ssa_p)
{
if (flag_checking)
verify_loop_closed_ssa (verify_ssa_p);
}
extern basic_block split_loop_exit_edge (edge);
extern basic_block ip_end_pos (struct loop *);
extern basic_block ip_normal_pos (struct loop *);


@ -541,10 +541,9 @@ pass_cse_reciprocals::execute (function *fun)
calculate_dominance_info (CDI_DOMINATORS);
calculate_dominance_info (CDI_POST_DOMINATORS);
#ifdef ENABLE_CHECKING
FOR_EACH_BB_FN (bb, fun)
gcc_assert (!bb->aux);
#endif
if (flag_checking)
FOR_EACH_BB_FN (bb, fun)
gcc_assert (!bb->aux);
for (arg = DECL_ARGUMENTS (fun->decl); arg; arg = DECL_CHAIN (arg))
if (FLOAT_TYPE_P (TREE_TYPE (arg))


@ -881,12 +881,13 @@ get_expr_operands (struct function *fn, gimple *stmt, tree *expr_p, int flags)
}
/* If we get here, something has gone wrong. */
#ifdef ENABLE_CHECKING
fprintf (stderr, "unhandled expression in get_expr_operands():\n");
debug_tree (expr);
fputs ("\n", stderr);
#endif
gcc_unreachable ();
if (flag_checking)
{
fprintf (stderr, "unhandled expression in get_expr_operands():\n");
debug_tree (expr);
fputs ("\n", stderr);
gcc_unreachable ();
}
}


@ -1501,14 +1501,14 @@ static void
replace_exp_1 (use_operand_p op_p, tree val,
bool for_propagation ATTRIBUTE_UNUSED)
{
#if defined ENABLE_CHECKING
tree op = USE_FROM_PTR (op_p);
gcc_assert (!(for_propagation
&& TREE_CODE (op) == SSA_NAME
&& TREE_CODE (val) == SSA_NAME
&& !may_propagate_copy (op, val)));
#endif
if (flag_checking)
{
tree op = USE_FROM_PTR (op_p);
gcc_assert (!(for_propagation
&& TREE_CODE (op) == SSA_NAME
&& TREE_CODE (val) == SSA_NAME
&& !may_propagate_copy (op, val)));
}
if (TREE_CODE (val) == SSA_NAME)
SET_USE (op_p, val);


@ -2544,10 +2544,11 @@ rewrite_constraints (constraint_graph_t graph,
int i;
constraint_t c;
#ifdef ENABLE_CHECKING
for (unsigned int j = 0; j < graph->size; j++)
gcc_assert (find (j) == j);
#endif
if (flag_checking)
{
for (unsigned int j = 0; j < graph->size; j++)
gcc_assert (find (j) == j);
}
FOR_EACH_VEC_ELT (constraints, i, c)
{


@ -182,9 +182,7 @@ struct temp_expr_table
/* A place for the many, many bitmaps we create. */
static bitmap_obstack ter_bitmap_obstack;
#ifdef ENABLE_CHECKING
extern void debug_ter (FILE *, temp_expr_table *);
#endif
/* Create a new TER table for MAP. */
@ -232,16 +230,16 @@ free_temp_expr_table (temp_expr_table *t)
{
bitmap ret = NULL;
if (flag_checking)
{
for (unsigned x = 0; x <= num_var_partitions (t->map); x++)
gcc_assert (!t->kill_list[x]);
for (unsigned x = 0; x < num_ssa_names; x++)
{
gcc_assert (t->expr_decl_uids[x] == NULL);
gcc_assert (t->partition_dependencies[x] == NULL);
}
}
BITMAP_FREE (t->partition_in_use);
BITMAP_FREE (t->new_replaceable_dependencies);
@ -748,7 +746,6 @@ dump_replaceable_exprs (FILE *f, bitmap expr)
}
#ifdef ENABLE_CHECKING
/* Dump the status of the various tables in the expression table. This is used
exclusively to debug TER. F is the place to send debug info and T is the
table being debugged. */
@ -796,4 +793,3 @@ debug_ter (FILE *f, temp_expr_table *t)
fprintf (f, "\n----------\n");
}
#endif


@ -2510,9 +2510,8 @@ duplicate_thread_path (edge entry, edge exit,
scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
}
#ifdef ENABLE_CHECKING
verify_jump_thread (region_copy, n_region);
#endif
if (flag_checking)
verify_jump_thread (region_copy, n_region);
/* Remove the last branch in the jump thread path. */
remove_ctrl_stmt_and_useless_edges (region_copy[n_region - 1], exit->dest);


@ -77,5 +77,13 @@ redirect_edge_var_map_location (edge_var_map *v)
return v->locus;
}
/* Verify SSA invariants, if internal consistency checks are enabled. */
static inline void
checking_verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
if (flag_checking)
verify_ssa (check_modified_stmt, check_ssa_operands);
}
#endif /* GCC_TREE_SSA_H */


@ -336,9 +336,8 @@ release_ssa_name_fn (struct function *fn, tree var)
if (MAY_HAVE_DEBUG_STMTS)
insert_debug_temp_for_var_def (NULL, var);
#ifdef ENABLE_CHECKING
verify_imm_links (stderr, var);
#endif
if (flag_checking)
verify_imm_links (stderr, var);
while (imm->next != imm)
delink_imm_use (imm->next);


@ -1107,13 +1107,14 @@ expand_ifn_va_arg (function *fun)
if ((fun->curr_properties & PROP_gimple_lva) == 0)
expand_ifn_va_arg_1 (fun);
#if ENABLE_CHECKING
basic_block bb;
gimple_stmt_iterator i;
FOR_EACH_BB_FN (bb, fun)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
#endif
if (flag_checking)
{
basic_block bb;
gimple_stmt_iterator i;
FOR_EACH_BB_FN (bb, fun)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
gcc_assert (!gimple_call_ifn_va_arg_p (gsi_stmt (i)));
}
}
namespace {


@ -919,9 +919,7 @@ slpeel_tree_duplicate_loop_to_edge_cfg (struct loop *loop,
free (new_bbs);
free (bbs);
#ifdef ENABLE_CHECKING
verify_dominators (CDI_DOMINATORS);
#endif
checking_verify_dominators (CDI_DOMINATORS);
return new_loop;
}
@ -1003,11 +1001,13 @@ slpeel_can_duplicate_loop_p (const struct loop *loop, const_edge e)
return true;
}
#ifdef ENABLE_CHECKING
static void
slpeel_verify_cfg_after_peeling (struct loop *first_loop,
struct loop *second_loop)
slpeel_checking_verify_cfg_after_peeling (struct loop *first_loop,
struct loop *second_loop)
{
if (!flag_checking)
return;
basic_block loop1_exit_bb = single_exit (first_loop)->dest;
basic_block loop2_entry_bb = loop_preheader_edge (second_loop)->src;
basic_block loop1_entry_bb = loop_preheader_edge (first_loop)->src;
@ -1035,7 +1035,6 @@ slpeel_verify_cfg_after_peeling (struct loop *first_loop,
second_loop. */
/* TODO */
}
#endif
/* If the run time cost model check determines that vectorization is
not profitable and hence scalar loop should be generated then set
@ -1773,9 +1772,7 @@ vect_do_peeling_for_loop_bound (loop_vec_info loop_vinfo,
0, LOOP_VINFO_VECT_FACTOR (loop_vinfo));
gcc_assert (new_loop);
gcc_assert (loop_num == loop->num);
#ifdef ENABLE_CHECKING
slpeel_verify_cfg_after_peeling (loop, new_loop);
#endif
slpeel_checking_verify_cfg_after_peeling (loop, new_loop);
/* A guard that controls whether the new_loop is to be executed or skipped
is placed in LOOP->exit. LOOP->exit therefore has two successors - one
@ -2032,9 +2029,7 @@ vect_do_peeling_for_alignment (loop_vec_info loop_vinfo, tree ni_name,
bound, 0);
gcc_assert (new_loop);
#ifdef ENABLE_CHECKING
slpeel_verify_cfg_after_peeling (new_loop, loop);
#endif
slpeel_checking_verify_cfg_after_peeling (new_loop, loop);
/* For vectorization factor N, we need to copy at most N-1 values
for alignment and this means N-2 loopback edge executions. */
max_iter = LOOP_VINFO_VECT_FACTOR (loop_vinfo) - 2;
