Merger of git branch "gimple-classes-v2-option-3"

gcc/ChangeLog:
	Merger of git branch "gimple-classes-v2-option-3".

	* ChangeLog.gimple-classes: New.

	* coretypes.h (struct gcond): Add forward decl.
	(struct gdebug): Likewise.
	(struct ggoto): Likewise.
	(struct glabel): Likewise.
	(struct gswitch): Likewise.
	(struct gassign): Likewise.
	(struct gasm): Likewise.
	(struct gcall): Likewise.
	(struct gtransaction): Likewise.
	(struct greturn): Likewise.
	(struct gbind): Likewise.
	(struct gcatch): Likewise.
	(struct geh_filter): Likewise.
	(struct geh_mnt): Likewise.
	(struct geh_else): Likewise.
	(struct gresx): Likewise.
	(struct geh_dispatch): Likewise.
	(struct gphi): Likewise.
	(struct gtry): Likewise.
	(struct gomp_atomic_load): Likewise.
	(struct gomp_atomic_store): Likewise.
	(struct gomp_continue): Likewise.
	(struct gomp_critical): Likewise.
	(struct gomp_for): Likewise.
	(struct gomp_parallel): Likewise.
	(struct gomp_task): Likewise.
	(struct gomp_sections): Likewise.
	(struct gomp_single): Likewise.
	(struct gomp_target): Likewise.
	(struct gomp_teams): Likewise.
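
	For illustration only, a minimal sketch of what these forward
	declarations enable: other headers can mention the new subclass
	pointer types in prototypes without including gimple.h.  The
	function names below are hypothetical.

	  /* Forward decls as provided by coretypes.h.  */
	  struct gcall;
	  struct greturn;

	  /* Hypothetical prototypes; the pointer parameters only need the
	     forward declarations above.  */
	  extern void note_call_site (gcall *call_stmt);
	  extern bool returns_value_p (const greturn *return_stmt);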

	* doc/gimple.texi (Class hierarchy of GIMPLE statements): Update
	for renaming of gimple subclasses.

	* gdbhooks.py: Update.

	* gimple-iterator.c (gsi_for_phi): New.
	(gsi_start_phis): Strengthen return type from gimple_stmt_iterator
	to gphi_iterator.
	* gimple-iterator.h (struct gphi_iterator): New subclass of
	gimple_stmt_iterator.
	(gsi_for_phi): New prototype.
	(gsi_start_phis): Strengthen return type from gimple_stmt_iterator
	to gphi_iterator.
	(gsi_next_nonvirtual_phi): Strengthen param from
	gimple_stmt_iterator * to gphi_iterator *, and local "phi" from
	gimple to gphi *.
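
	A minimal sketch of the resulting iteration pattern, assuming BB is
	an existing basic_block; gpi.phi () already yields a gphi *:

	  for (gphi_iterator gpi = gsi_start_phis (bb);
	       !gsi_end_p (gpi); gsi_next (&gpi))
	    {
	      gphi *phi = gpi.phi ();
	      tree result = gimple_phi_result (phi);
	      /* Work with RESULT; no cast from plain gimple is needed.  */
	    }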

	* gsstruct.def: Update for renamings of classes.

	* gimple.c (gimple_build_return): Strengthen return type from
	gimple to greturn *.
	(gimple_call_reset_alias_info): Strengthen param to gcall *.
	(gimple_build_call_1): Strengthen return type from gimple to
	gcall *.
	(gimple_build_call_vec): Likewise.
	(gimple_build_call): Likewise.
	(gimple_build_call_valist): Likewise.
	(gimple_build_call_internal_1): Likewise.
	(gimple_build_call_internal): Likewise.
	(gimple_build_call_internal_vec): Likewise.
	(gimple_build_call_from_tree): Likewise.
	(gimple_build_assign_stat): Strengthen return type from gimple to
	gassign *.
	(gimple_build_assign_with_ops): Likewise.
	(gimple_build_assign_with_ops): Likewise.
	(gimple_build_cond): Strengthen return type from gimple to
	gcond *.
	(gimple_build_cond_from_tree): Likewise.
	(gimple_cond_set_condition_from_tree): Require a gcond *.
	(gimple_build_label): Strengthen return type from gimple to
	glabel *.
	(gimple_build_goto): Strengthen return type from gimple to
	ggoto *.
	(gimple_build_bind): Strengthen return type from gimple to
	gbind *.
	(gimple_build_asm_1): Strengthen return type from gimple to
	gasm *.
	(gimple_build_asm_vec): Likewise.
	(gimple_build_catch): Strengthen return type from gimple to
	gcatch *.
	(gimple_build_eh_filter): Strengthen return type from gimple to
	geh_filter *.
	(gimple_build_eh_must_not_throw): Strengthen return type from
	gimple to geh_mnt *.
	(gimple_build_eh_else): Strengthen return type from gimple to
	geh_else *.
	(gimple_build_try): Update for renaming of gimple_statement_try to
	gtry.
	(gimple_build_resx): Strengthen return type from gimple to
	gresx *.
	(gimple_build_switch_nlabels): Strengthen return type from gimple
	to gswitch *.
	(gimple_build_switch): Likewise.
	(gimple_build_eh_dispatch): Strengthen return type from gimple to
	geh_dispatch *.
	(gimple_build_debug_bind_stat): Strengthen return type from gimple
	to gdebug *.
	(gimple_build_debug_source_bind_stat): Strengthen return type from
	gimple to gdebug *.
	(gimple_build_omp_critical): Strengthen return type from gimple to
	gomp_critical *.
	(gimple_build_omp_for): Strengthen return type from gimple to
	gomp_for *.
	(gimple_build_omp_parallel): Strengthen return type from gimple to
	gomp_parallel *.
	(gimple_build_omp_task): Strengthen return type from gimple to
	gomp_task *.
	(gimple_build_omp_continue): Strengthen return type from gimple to
	gomp_continue *.
	(gimple_build_omp_sections): Strengthen return type from gimple to
	gomp_sections *.
	(gimple_build_omp_single): Strengthen return type from gimple to
	gomp_single *.
	(gimple_build_omp_target): Strengthen return type from gimple to
	gomp_target *.
	(gimple_build_omp_teams): Strengthen return type from gimple to
	gomp_teams *.
	(gimple_build_omp_atomic_load): Strengthen return type from gimple
	to gomp_atomic_load *.
	(gimple_build_omp_atomic_store): Strengthen return type from gimple
	to gomp_atomic_store *.
	(gimple_build_transaction): Strengthen return type from gimple
	to gtransaction *.
	(empty_stmt_p): Replace check for GIMPLE_BIND with a dyn_cast.
	(gimple_call_fnspec): Require a const gcall *.
	(gimple_call_arg_flags): Likewise.
	(gimple_call_return_flags): Likewise.
	(gimple_set_bb): Add a checked cast.
	(gimple_copy): Within the cases, add locals of the appropriate
	subclass and use in place of "stmt" and "copy" for typesafety.
	(gimple_has_side_effects): Add a checked cast.
	(gimple_could_trap_p_1): Likewise.
	(gimple_call_copy_skip_args): Require a gcall *, and return one.
	(gimple_asm_clobbers_memory_p): Require a const gasm *.
	(infer_nonnull_range): Replace a check for GIMPLE_RETURN with a
	dyn_cast, introducing local "return_stmt" and using it in place
	of "stmt".

	* gimple.h (gimple_vec): Eliminate this typedef.
	(struct gimple_statement_call): Rename to...
	(struct gcall): ...this.
	(struct gimple_statement_bind): Rename to...
	(struct gbind): ...this.
	(struct gimple_statement_catch): Rename to...
	(struct gcatch): ...this.
	(struct gimple_statement_eh_filter): Rename to...
	(struct geh_filter): ...this.
	(struct gimple_statement_eh_else): Rename to...
	(struct geh_else): ...this.
	(struct gimple_statement_eh_mnt): Rename to...
	(struct geh_mnt): ...this.
	(struct gimple_statement_phi): Rename to...
	(struct gphi): ...this.
	(struct gimple_statement_resx): Rename to...
	(struct gresx): ...this.
	(struct gimple_statement_eh_dispatch): Rename to...
	(struct geh_dispatch): ...this.
	(struct gimple_statement_try): Rename to...
	(struct gtry): ...this.
	(struct gimple_statement_asm): Rename to...
	(struct gasm): ...this.
	(struct gimple_statement_omp_critical): Rename to...
	(struct gomp_critical): ...this.
	(struct gimple_statement_omp_for): Rename to...
	(struct gomp_for): ...this.
	(struct gimple_statement_omp_parallel): Rename to...
	(struct gomp_parallel): ...this.
	(struct gimple_statement_omp_target): Rename to...
	(struct gomp_target): ...this.
	(struct gimple_statement_omp_task): Rename to...
	(struct gomp_task): ...this.
	(struct gimple_statement_omp_sections): Rename to...
	(struct gomp_sections): ...this.
	(struct gimple_statement_omp_continue): Rename to...
	(struct gomp_continue): ...this.
	(struct gimple_statement_omp_single): Rename to...
	(struct gomp_single): ...this.
	(struct gimple_statement_omp_teams): Rename to...
	(struct gomp_teams): ...this.
	(struct gimple_statement_omp_atomic_load): Rename to...
	(struct gomp_atomic_load): ...this.
	(struct gimple_statement_omp_atomic_store): Rename to...
	(struct gomp_atomic_store): ...this.
	(struct gimple_statement_transaction): Rename to...
	(struct gtransaction): ...this.
	(struct gcond): New subclass.
	(struct gdebug): New subclass.
	(struct ggoto): New subclass.
	(struct glabel): New subclass.
	(struct gswitch): New subclass.
	(struct gassign): New subclass.
	(struct greturn): New subclass.
	(is_a_helper <gimple_statement_asm *>::test): Rename to...
	(is_a_helper <gasm *>::test): ...this.
	(is_a_helper <gimple_statement_bind *>::test): Rename to...
	(is_a_helper <gbind *>::test): ...this.
	(is_a_helper <gassign *>::test): New.
	(is_a_helper <gimple_statement_call *>::test): Rename to...
	(is_a_helper <gcall *>::test): ...this.
	(is_a_helper <gimple_statement_catch *>::test): Rename to...
	(is_a_helper <gcatch *>::test): ...this.
	(is_a_helper <gimple_statement_resx *>::test): Rename to...
	(is_a_helper <gresx *>::test): ...this.
	(is_a_helper <gcond *>::test): New.
	(is_a_helper <gdebug *>::test): New.
	(is_a_helper <ggoto *>::test): New.
	(is_a_helper <glabel *>::test): New.
	(is_a_helper <gimple_statement_eh_dispatch *>::test): Rename to...
	(is_a_helper <geh_dispatch *>::test): ...this.
	(is_a_helper <gimple_statement_eh_else *>::test): Rename to...
	(is_a_helper <geh_else *>::test): ...this.
	(is_a_helper <gimple_statement_eh_filter *>::test): Rename to...
	(is_a_helper <geh_filter *>::test): ...this.
	(is_a_helper <gimple_statement_eh_mnt *>::test): Rename to...
	(is_a_helper <geh_mnt *>::test): ...this.
	(is_a_helper <gimple_statement_omp_atomic_load *>::test): Rename to...
	(is_a_helper <gomp_atomic_load *>::test): ...this.
	(is_a_helper <gimple_statement_omp_atomic_store *>::test): Rename to...
	(is_a_helper <gomp_atomic_store *>::test): ...this.
	(is_a_helper <gimple_statement_omp_continue *>::test): Rename to...
	(is_a_helper <gomp_continue *>::test): ...this.
	(is_a_helper <gimple_statement_omp_critical *>::test): Rename to...
	(is_a_helper <gomp_critical *>::test): ...this.
	(is_a_helper <gimple_statement_omp_for *>::test): Rename to...
	(is_a_helper <gomp_for *>::test): ...this.
	(is_a_helper <gimple_statement_omp_parallel *>::test): Rename to...
	(is_a_helper <gomp_parallel *>::test): ...this.
	(is_a_helper <gimple_statement_omp_target *>::test): Rename to...
	(is_a_helper <gomp_target *>::test): ...this.
	(is_a_helper <gimple_statement_omp_sections *>::test): Rename to...
	(is_a_helper <gomp_sections *>::test): ...this.
	(is_a_helper <gimple_statement_omp_single *>::test): Rename to...
	(is_a_helper <gomp_single *>::test): ...this.
	(is_a_helper <gimple_statement_omp_teams *>::test): Rename to...
	(is_a_helper <gomp_teams *>::test): ...this.
	(is_a_helper <gimple_statement_omp_task *>::test): Rename to...
	(is_a_helper <gomp_task *>::test): ...this.
	(is_a_helper <gimple_statement_phi *>::test): Rename to...
	(is_a_helper <gphi *>::test): ...this.
	(is_a_helper <gimple_statement_transaction *>::test): Rename to...
	(is_a_helper <gtransaction *>::test): ...this.
	(is_a_helper <greturn *>::test): New.
	(is_a_helper <gswitch *>::test): New.
	(is_a_helper <gimple_statement_try *>::test): Rename to...
	(is_a_helper <gtry *>::test): ...this.
	(is_a_helper <const gimple_statement_asm *>::test): Rename to...
	(is_a_helper <const gasm *>::test): ...this.
	(is_a_helper <const gimple_statement_bind *>::test): Rename to...
	(is_a_helper <const gbind *>::test): ...this.
	(is_a_helper <const gimple_statement_call *>::test): Rename to...
	(is_a_helper <const gcall *>::test): ...this.
	(is_a_helper <const gimple_statement_catch *>::test): Rename to...
	(is_a_helper <const gcatch *>::test): ...this.
	(is_a_helper <const gimple_statement_resx *>::test): Rename to...
	(is_a_helper <const gresx *>::test): ...this.
	(is_a_helper <const gimple_statement_eh_dispatch *>::test): Rename to...
	(is_a_helper <const geh_dispatch *>::test): ...this.
	(is_a_helper <const gimple_statement_eh_filter *>::test): Rename to...
	(is_a_helper <const geh_filter *>::test): ...this.
	(is_a_helper <const gimple_statement_omp_atomic_load *>::test):
	Rename to...
	(is_a_helper <const gomp_atomic_load *>::test): ...this.
	(is_a_helper <const gimple_statement_omp_atomic_store *>::test):
	Rename to...
	(is_a_helper <const gomp_atomic_store *>::test): ...this.
	(is_a_helper <const gimple_statement_omp_continue *>::test):
	Rename to...
	(is_a_helper <const gomp_continue *>::test): ...this.
	(is_a_helper <const gimple_statement_omp_critical *>::test):
	Rename to...
	(is_a_helper <const gomp_critical *>::test): ...this.
	(is_a_helper <const gimple_statement_omp_for *>::test): Rename to...
	(is_a_helper <const gomp_for *>::test): ...this.
	(is_a_helper <const gimple_statement_omp_parallel *>::test):
	Rename to...
	(is_a_helper <const gomp_parallel *>::test): ...this.
	(is_a_helper <const gimple_statement_omp_target *>::test): Rename to...
	(is_a_helper <const gomp_target *>::test): ...this.
	(is_a_helper <const gimple_statement_omp_sections *>::test):
	Rename to...
	(is_a_helper <const gomp_sections *>::test): ...this.
	(is_a_helper <const gimple_statement_omp_single *>::test): Rename to...
	(is_a_helper <const gomp_single *>::test): ...this.
	(is_a_helper <const gimple_statement_omp_teams *>::test): Rename to...
	(is_a_helper <const gomp_teams *>::test): ...this.
	(is_a_helper <const gimple_statement_omp_task *>::test): Rename to...
	(is_a_helper <const gomp_task *>::test): ...this.
	(is_a_helper <const gimple_statement_phi *>::test): Rename to...
	(is_a_helper <const gphi *>::test): ...this.
	(is_a_helper <const gimple_statement_transaction *>::test): Rename to...
	(is_a_helper <const gtransaction *>::test): ...this.
	(gimple_build_return): Strengthen return type to greturn *.
	(gimple_call_reset_alias_info): Require a gcall *.
	(gimple_build_call_vec): Return a gcall *.
	(gimple_build_call): Likewise.
	(gimple_build_call_valist): Likewise.
	(gimple_build_call_internal): Likewise.
	(gimple_build_call_internal_vec): Likewise.
	(gimple_build_call_from_tree): Likewise.
	(gimple_build_assign_stat): Return a gassign *.
	(gimple_build_assign_with_ops): Likewise.
	(gimple_build_cond): Return a gcond *.
	(gimple_build_cond_from_tree): Likewise.
	(gimple_cond_set_condition_from_tree): Require a gcond *.
	(gimple_build_label): Return a glabel *.
	(gimple_build_goto): Return a ggoto *.
	(gimple_build_bind): Return a gbind *.
	(gimple_build_asm_vec): Return a gasm *.
	(gimple_build_catch): Return a gcatch *.
	(gimple_build_eh_filter): Return a geh_filter *.
	(gimple_build_eh_must_not_throw): Return a geh_mnt *.
	(gimple_build_eh_else): Return a geh_else *.
	(gimple_build_try): Return a gtry *.
	(gimple_build_resx): Return a gresx *.
	(gimple_build_switch_nlabels): Return a gswitch *.
	(gimple_build_switch): Return a gswitch *.
	(gimple_build_eh_dispatch): Return a geh_dispatch *.
	(gimple_build_debug_bind_stat): Return a gdebug *.
	(gimple_build_debug_source_bind_stat): Return a gdebug *.
	(gimple_build_omp_critical): Return a gomp_critical *.
	(gimple_build_omp_for): Return a gomp_for *.
	(gimple_build_omp_parallel): Return a gomp_parallel *.
	(gimple_build_omp_task): Return a gomp_task *.
	(gimple_build_omp_continue): Return a gomp_continue *.
	(gimple_build_omp_sections): Return a gomp_sections *.
	(gimple_build_omp_single): Return a gomp_single *.
	(gimple_build_omp_target): Return a gomp_target *.
	(gimple_build_omp_teams): Return a gomp_teams *.
	(gimple_build_omp_atomic_load): Return a gomp_atomic_load *.
	(gimple_build_omp_atomic_store): Return a gomp_atomic_store *.
	(gimple_build_transaction): Return a gtransaction *.
	(gimple_call_arg_flags): Require a const gcall *.
	(gimple_call_return_flags): Likewise.
	(gimple_call_copy_skip_args): Require and return a gcall *.
	(gimple_asm_clobbers_memory_p): Require a const gasm *.
	(gimple_seq_first_stmt_as_a_bind): New.
	(gimple_assign_nontemporal_move_p): Require a const gassign *
	rather than a const_gimple.
	(gimple_call_internal_fn): Update for renaming to gcall.
	(gimple_call_fntype): Likewise.
	(gimple_call_set_fntype): Require a gcall * rather than a gimple.
	(gimple_call_set_fn): Likewise.
	(gimple_call_set_internal_fn): Likewise.
	(gimple_call_set_chain): Likewise.
	(gimple_call_set_tail): Likewise.
	(gimple_call_tail_p): Likewise.
	(gimple_call_set_return_slot_opt): Likewise.
	(gimple_call_return_slot_opt_p): Likewise.
	(gimple_call_set_from_thunk): Likewise.
	(gimple_call_from_thunk_p): Likewise.
	(gimple_call_set_va_arg_pack): Likewise.
	(gimple_call_va_arg_pack_p): Likewise.
	(gimple_call_set_nothrow): Likewise.
	(gimple_call_nothrow_p): Likewise.
	(gimple_call_set_alloca_for_var): Likewise.
	(gimple_call_alloca_for_var_p): Likewise.
	(gimple_call_use_set): Likewise.
	(gimple_call_clobber_set): Likewise.
	(gimple_call_return_type): Require a const gcall * rather than a
	const_gimple.
	(gimple_call_chain_ptr): Likewise.
	(gimple_call_copy_flags): Require a pair of gcall *.
	(gimple_cond_set_code): Require a gcond * rather than a gimple.
	(gimple_cond_set_lhs): Likewise.
	(gimple_cond_set_rhs): Likewise.
	(gimple_cond_set_true_label): Likewise.
	(gimple_cond_set_false_label): Likewise.
	(gimple_cond_make_false): Likewise.
	(gimple_cond_make_true): Likewise.
	(gimple_cond_lhs_ptr): Require a const gcond * rather than a
	const_gimple.
	(gimple_cond_rhs_ptr): Likewise.
	(gimple_cond_true_label): Likewise.
	(gimple_cond_false_label): Likewise.
	(gimple_cond_true_p): Likewise.
	(gimple_cond_false_p): Likewise.
	(gimple_cond_set_condition): Likewise.
	(gimple_label_label): Require a const glabel *.
	(gimple_label_set_label): Require a glabel *.
	(gimple_goto_set_dest): Require a ggoto *.
	(gimple_bind_vars): Require a const gbind *.
	(gimple_bind_block): Likewise.
	(gimple_bind_set_vars): Require a gbind *.
	(gimple_bind_append_vars): Likewise.
	(gimple_bind_body_ptr): Likewise.
	(gimple_bind_body): Likewise.
	(gimple_bind_set_body): Likewise.
	(gimple_bind_add_stmt): Likewise.
	(gimple_bind_add_seq): Likewise.
	(gimple_bind_set_block): Likewise.
	(gimple_asm_ninputs): Require a const gasm *.
	(gimple_asm_noutputs): Likewise.
	(gimple_asm_nclobbers): Likewise.
	(gimple_asm_nlabels): Likewise.
	(gimple_asm_input_op): Likewise.
	(gimple_asm_input_op_ptr): Likewise.
	(gimple_asm_output_op): Likewise.
	(gimple_asm_output_op_ptr): Likewise.
	(gimple_asm_clobber_op): Likewise.
	(gimple_asm_label_op): Likewise.
	(gimple_asm_string): Likewise.
	(gimple_asm_volatile_p): Likewise.
	(gimple_asm_input_p): Likewise.
	(gimple_asm_set_input_op): Require a gasm *.
	(gimple_asm_set_output_op): Likewise.
	(gimple_asm_set_clobber_op): Likewise.
	(gimple_asm_set_label_op): Likewise.
	(gimple_asm_set_volatile): Likewise.
	(gimple_asm_set_input): Likewise.
	(gimple_catch_types): Require a const gcatch *.
	(gimple_catch_types_ptr): Require a gcatch *.
	(gimple_catch_handler_ptr): Likewise.
	(gimple_catch_handler): Likewise.
	(gimple_catch_set_types): Likewise.
	(gimple_catch_set_handler): Likewise.
	(gimple_eh_filter_types): Update for renaming of subclass to
	geh_filter.
	(gimple_eh_filter_types_ptr): Likewise.
	(gimple_eh_filter_failure_ptr): Likewise.
	(gimple_eh_filter_set_types): Require a geh_filter *.
	(gimple_eh_filter_set_failure): Likewise.
	(gimple_eh_must_not_throw_fndecl): Require a geh_mnt *.
	(gimple_eh_must_not_throw_set_fndecl): Likewise.
	(gimple_eh_else_n_body_ptr): Require a geh_else *.
	(gimple_eh_else_n_body): Likewise.
	(gimple_eh_else_e_body_ptr): Likewise.
	(gimple_eh_else_e_body): Likewise.
	(gimple_eh_else_set_n_body): Likewise.
	(gimple_eh_else_set_e_body): Likewise.
	(gimple_try_set_kind): Require a gtry *.
	(gimple_try_set_catch_is_cleanup): Likewise.
	(gimple_try_set_eval): Likewise.
	(gimple_try_set_cleanup): Likewise.
	(gimple_try_eval_ptr): Update for renaming of subclass to gtry.
	(gimple_try_cleanup_ptr): Likewise.
	(gimple_phi_capacity): Update for renaming of subclass to gphi.
	(gimple_phi_num_args): Likewise.
	(gimple_phi_result): Likewise.
	(gimple_phi_result_ptr): Likewise.
	(gimple_phi_arg): Likewise.
	(gimple_phi_set_result): Require a gphi *.
	(gimple_phi_set_arg): Likewise.
	(gimple_phi_arg_def_ptr): Likewise.
	(gimple_phi_arg_edge): Likewise.
	(gimple_phi_arg_location): Likewise.
	(gimple_phi_arg_location_from_edge): Likewise.
	(gimple_phi_arg_set_location): Likewise.
	(gimple_phi_arg_has_location): Likewise.
	(gimple_resx_region): Require a const gresx *.
	(gimple_resx_set_region): Require a gresx *.
	(gimple_eh_dispatch_region): Require a const geh_dispatch *.
	(gimple_eh_dispatch_set_region): Require a geh_dispatch *.
	(gimple_switch_num_labels): Require a const gswitch *.
	(gimple_switch_set_num_labels): Likewise.
	(gimple_switch_index): Likewise.
	(gimple_switch_index_ptr): Likewise.
	(gimple_switch_label): Likewise.
	(gimple_switch_default_label): Likewise.
	(gimple_switch_set_index): Require a gswitch *.
	(gimple_switch_set_label): Likewise.
	(gimple_switch_set_default_label): Likewise.
	(gimple_omp_critical_name): Require a const gomp_critical *.
	(gimple_omp_critical_name_ptr): Require a gomp_critical *.
	(gimple_omp_critical_set_name): Likewise.
	(gimple_omp_for_set_kind): Require a gomp_for *.
	(gimple_omp_for_set_combined_p): Likewise.
	(gimple_omp_for_set_combined_into_p): Likewise.
	(gimple_omp_for_clauses): Update for renaming of subclass to
	gomp_for.
	(gimple_omp_for_clauses_ptr): Likewise.
	(gimple_omp_for_set_clauses): Likewise.
	(gimple_omp_for_collapse): Likewise.
	(gimple_omp_for_index): Likewise.
	(gimple_omp_for_index_ptr): Likewise.
	(gimple_omp_for_set_index): Likewise.
	(gimple_omp_for_initial): Likewise.
	(gimple_omp_for_initial_ptr): Likewise.
	(gimple_omp_for_set_initial): Likewise.
	(gimple_omp_for_final): Likewise.
	(gimple_omp_for_final_ptr): Likewise.
	(gimple_omp_for_set_final): Likewise.
	(gimple_omp_for_incr): Likewise.
	(gimple_omp_for_incr_ptr): Likewise.
	(gimple_omp_for_set_incr): Likewise.
	(gimple_omp_for_pre_body): Likewise.
	(gimple_omp_for_set_pre_body): Likewise.
	(gimple_omp_parallel_clauses): Update for renaming of subclass to
	gomp_parallel.
	(gimple_omp_parallel_clauses_ptr): Require a gomp_parallel *.
	(gimple_omp_parallel_set_clauses): Likewise.
	(gimple_omp_parallel_child_fn_ptr): Likewise.
	(gimple_omp_parallel_set_child_fn): Likewise.
	(gimple_omp_parallel_data_arg_ptr): Likewise.
	(gimple_omp_parallel_set_data_arg): Likewise.
	(gimple_omp_parallel_child_fn): Require a const gomp_parallel *.
	(gimple_omp_parallel_data_arg): Likewise.
	(gimple_omp_task_clauses): Update for renaming of subclass to
	gomp_task.
	(gimple_omp_task_clauses_ptr): Likewise.
	(gimple_omp_task_set_clauses): Likewise.
	(gimple_omp_task_child_fn): Likewise.
	(gimple_omp_task_child_fn_ptr): Likewise.
	(gimple_omp_task_set_child_fn): Likewise.
	(gimple_omp_task_data_arg): Likewise.
	(gimple_omp_task_data_arg_ptr): Likewise.
	(gimple_omp_task_set_data_arg): Likewise.
	(gimple_omp_taskreg_clauses): Whitespace fixes.
	(gimple_omp_taskreg_clauses_ptr): Likewise.
	(gimple_omp_taskreg_set_clauses): Likewise.
	(gimple_omp_taskreg_child_fn): Likewise.
	(gimple_omp_taskreg_child_fn_ptr): Likewise.
	(gimple_omp_taskreg_set_child_fn): Likewise.
	(gimple_omp_taskreg_data_arg): Likewise.
	(gimple_omp_taskreg_data_arg_ptr): Likewise.
	(gimple_omp_taskreg_set_data_arg): Likewise.
	(gimple_omp_task_copy_fn): Update for renaming of subclass to
	gomp_task.
	(gimple_omp_task_copy_fn_ptr): Likewise.
	(gimple_omp_task_set_copy_fn): Likewise.
	(gimple_omp_task_arg_size): Likewise.
	(gimple_omp_task_arg_size_ptr): Likewise.
	(gimple_omp_task_set_arg_size): Likewise.
	(gimple_omp_task_arg_align): Likewise.
	(gimple_omp_task_arg_align_ptr): Likewise.
	(gimple_omp_task_set_arg_align): Likewise.
	(gimple_omp_single_clauses): Update for renaming of subclass to
	gomp_single.
	(gimple_omp_single_clauses_ptr): Likewise.
	(gimple_omp_single_set_clauses): Likewise.
	(gimple_omp_target_clauses): Update for renaming of subclass to
	gomp_target.
	(gimple_omp_target_clauses_ptr): Likewise.
	(gimple_omp_target_set_clauses): Require a gomp_target *.
	(gimple_omp_target_set_kind): Likewise.
	(gimple_omp_target_child_fn_ptr): Likewise.
	(gimple_omp_target_set_child_fn): Likewise.
	(gimple_omp_target_data_arg_ptr): Likewise.
	(gimple_omp_target_set_data_arg): Likewise.
	(gimple_omp_target_child_fn): Require a const gomp_target *.
	(gimple_omp_target_data_arg): Likewise.
	(gimple_omp_teams_clauses): Update for renaming of subclass to
	gomp_teams.
	(gimple_omp_teams_clauses_ptr): Likewise.
	(gimple_omp_teams_set_clauses): Require a gomp_teams *.
	(gimple_omp_sections_clauses): Update for renaming of subclass to
	gomp_sections.
	(gimple_omp_sections_clauses_ptr): Likewise.
	(gimple_omp_sections_set_clauses): Likewise.
	(gimple_omp_sections_control): Likewise.
	(gimple_omp_sections_control_ptr): Likewise.
	(gimple_omp_sections_set_control): Likewise.
	(gimple_omp_for_set_cond): Likewise.
	(gimple_omp_for_cond): Likewise.
	(gimple_omp_atomic_store_set_val): Require a gomp_atomic_store *.
	(gimple_omp_atomic_store_val_ptr): Likewise.
	(gimple_omp_atomic_load_set_lhs): Likewise.
	(gimple_omp_atomic_store_val): Require a const gomp_atomic_store *.
	(gimple_omp_atomic_load_lhs): Likewise.
	(gimple_omp_atomic_load_rhs): Likewise.
	(gimple_omp_atomic_load_lhs_ptr): Require a gomp_atomic_load *.
	(gimple_omp_atomic_load_set_rhs): Likewise.
	(gimple_omp_atomic_load_rhs_ptr): Likewise.
	(gimple_omp_continue_control_def): Require a const gomp_continue *.
	(gimple_omp_continue_control_use): Likewise.
	(gimple_omp_continue_control_def_ptr): Require a gomp_continue *.
	(gimple_omp_continue_set_control_def): Likewise.
	(gimple_omp_continue_control_use_ptr): Likewise.
	(gimple_omp_continue_set_control_use): Likewise.
	(gimple_transaction_body_ptr): Require a gtransaction *.
	(gimple_transaction_body): Likewise.
	(gimple_transaction_label_ptr): Likewise.
	(gimple_transaction_label): Require a const gtransaction *.
	(gimple_transaction_subcode): Likewise.
	(gimple_transaction_set_body): Require a gtransaction *.
	(gimple_transaction_set_label): Likewise.
	(gimple_transaction_set_subcode): Likewise.
	(gimple_return_retval_ptr): Require a const greturn *.
	(gimple_return_retval): Likewise.
	(gimple_return_set_retval): Require a greturn *.
	(gimple_expr_type): Introduce local "call_stmt" and use in place of
	"stmt" for typesafety.

	* asan.c: Use gimple subclasses.
	* auto-profile.c: Likewise.
	* builtins.c: Likewise.
	* builtins.h: Likewise.
	* cfgexpand.c: Likewise.
	* cfgloop.c: Likewise.
	* cfgloopmanip.c: Likewise.
	* cgraph.c: Likewise.
	* cgraph.h: Likewise.
	* cgraphbuild.c: Likewise.
	* cgraphclones.c: Likewise.
	* cgraphunit.c: Likewise.
	* expr.h: Likewise.
	* gimple-builder.c: Likewise.
	* gimple-builder.h: Likewise.
	* gimple-fold.c: Likewise.
	* gimple-low.c: Likewise.
	* gimple-pretty-print.c: Likewise.
	* gimple-ssa-isolate-paths.c: Likewise.
	* gimple-ssa-strength-reduction.c: Likewise.
	* gimple-streamer-in.c: Likewise.
	* gimple-streamer-out.c: Likewise.
	* gimple-walk.c: Likewise.
	* gimplify-me.c: Likewise.
	* gimplify.c: Likewise.
	* gimplify.h: Likewise.
	* graphite-scop-detection.c: Likewise.
	* graphite-sese-to-poly.c: Likewise.
	* internal-fn.c: Likewise.
	* internal-fn.def: Likewise.
	* internal-fn.h: Likewise.
	* ipa-icf-gimple.c: Likewise.
	* ipa-icf-gimple.h: Likewise.
	* ipa-icf.c: Likewise.
	* ipa-inline-analysis.c: Likewise.
	* ipa-prop.c: Likewise.
	* ipa-prop.h: Likewise.
	* ipa-pure-const.c: Likewise.
	* ipa-split.c: Likewise.
	* lto-streamer-in.c: Likewise.
	* lto-streamer-out.c: Likewise.
	* omp-low.c: Likewise.
	* predict.c: Likewise.
	* sanopt.c: Likewise.
	* sese.c: Likewise.
	* ssa-iterators.h: Likewise.
	* stmt.c: Likewise.
	* trans-mem.c: Likewise.
	* tree-call-cdce.c: Likewise.
	* tree-cfg.c: Likewise.
	* tree-cfg.h: Likewise.
	* tree-cfgcleanup.c: Likewise.
	* tree-chkp.c: Likewise.
	* tree-chkp.h: Likewise.
	* tree-complex.c: Likewise.
	* tree-data-ref.c: Likewise.
	* tree-dfa.c: Likewise.
	* tree-eh.c: Likewise.
	* tree-eh.h: Likewise.
	* tree-emutls.c: Likewise.
	* tree-if-conv.c: Likewise.
	* tree-inline.c: Likewise.
	* tree-inline.h: Likewise.
	* tree-into-ssa.c: Likewise.
	* tree-into-ssa.h: Likewise.
	* tree-loop-distribution.c: Likewise.
	* tree-nrv.c: Likewise.
	* tree-object-size.c: Likewise.
	* tree-outof-ssa.c: Likewise.
	* tree-parloops.c: Likewise.
	* tree-phinodes.c: Likewise.
	* tree-phinodes.h: Likewise.
	* tree-predcom.c: Likewise.
	* tree-profile.c: Likewise.
	* tree-scalar-evolution.c: Likewise.
	* tree-scalar-evolution.h: Likewise.
	* tree-sra.c: Likewise.
	* tree-ssa-alias.c: Likewise.
	* tree-ssa-alias.h: Likewise.
	* tree-ssa-ccp.c: Likewise.
	* tree-ssa-coalesce.c: Likewise.
	* tree-ssa-copy.c: Likewise.
	* tree-ssa-copyrename.c: Likewise.
	* tree-ssa-dce.c: Likewise.
	* tree-ssa-dom.c: Likewise.
	* tree-ssa-forwprop.c: Likewise.
	* tree-ssa-ifcombine.c: Likewise.
	* tree-ssa-live.c: Likewise.
	* tree-ssa-loop-im.c: Likewise.
	* tree-ssa-loop-ivcanon.c: Likewise.
	* tree-ssa-loop-ivopts.c: Likewise.
	* tree-ssa-loop-manip.c: Likewise.
	* tree-ssa-loop-niter.c: Likewise.
	* tree-ssa-loop-prefetch.c: Likewise.
	* tree-ssa-loop-unswitch.c: Likewise.
	* tree-ssa-math-opts.c: Likewise.
	* tree-ssa-operands.c: Likewise.
	* tree-ssa-phiopt.c: Likewise.
	* tree-ssa-phiprop.c: Likewise.
	* tree-ssa-pre.c: Likewise.
	* tree-ssa-propagate.c: Likewise.
	* tree-ssa-propagate.h: Likewise.
	* tree-ssa-reassoc.c: Likewise.
	* tree-ssa-sccvn.c: Likewise.
	* tree-ssa-sccvn.h: Likewise.
	* tree-ssa-sink.c: Likewise.
	* tree-ssa-strlen.c: Likewise.
	* tree-ssa-structalias.c: Likewise.
	* tree-ssa-tail-merge.c: Likewise.
	* tree-ssa-ter.c: Likewise.
	* tree-ssa-threadedge.c: Likewise.
	* tree-ssa-threadedge.h: Likewise.
	* tree-ssa-threadupdate.c: Likewise.
	* tree-ssa-uncprop.c: Likewise.
	* tree-ssa-uninit.c: Likewise.
	* tree-ssa.c: Likewise.
	* tree-stdarg.c: Likewise.
	* tree-switch-conversion.c: Likewise.
	* tree-tailcall.c: Likewise.
	* tree-vect-data-refs.c: Likewise.
	* tree-vect-generic.c: Likewise.
	* tree-vect-loop-manip.c: Likewise.
	* tree-vect-loop.c: Likewise.
	* tree-vect-patterns.c: Likewise.
	* tree-vect-slp.c: Likewise.
	* tree-vect-stmts.c: Likewise.
	* tree-vectorizer.h: Likewise.
	* tree-vrp.c: Likewise.
	* tree.c: Likewise.
	* ubsan.c: Likewise.
	* value-prof.c: Likewise.
	* value-prof.h: Likewise.
	* vtable-verify.c: Likewise.
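
	A minimal sketch of the kind of change made throughout the files
	listed above, assuming STMT is a gimple already known to be a
	GIMPLE_SWITCH; the one-time as_a downcast replaces repeated
	unchecked accesses:

	  gswitch *switch_stmt = as_a <gswitch *> (stmt);
	  unsigned num_labels = gimple_switch_num_labels (switch_stmt);
	  tree default_case = gimple_switch_default_label (switch_stmt);
	  /* NUM_LABELS and DEFAULT_CASE come from gswitch-typed accessors.  */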

gcc/c-family/ChangeLog:
	Merger of git branch "gimple-classes-v2-option-3".
	* ChangeLog.gimple-classes: New.
	* c-gimplify.c (add_block_to_enclosing): Strengthen local "stack"
	from being just a vec<gimple> to a vec<gbind *>.
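
	A minimal sketch of what the strengthened vector buys, assuming
	BIND_STMT is an existing gbind * and BLOCK an existing tree;
	elements come back as gbind *, so bind-specific setters apply
	directly:

	  auto_vec<gbind *> stack;
	  stack.safe_push (bind_stmt);
	  gbind *innermost = stack.last ();
	  gimple_bind_set_block (innermost, block);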

gcc/java/ChangeLog:
	Merger of git branch "gimple-classes-v2-option-3".
	* ChangeLog.gimple-classes: New.
	* java-gimplify.c (java_gimplify_block): Strengthen local "outer"
	from gimple to gbind *.

From-SVN: r217787
This commit is contained in:
David Malcolm 2014-11-19 17:00:54 +00:00 committed by David Malcolm
parent da9c88d86d
commit 538dd0b78f
150 changed files with 9602 additions and 3518 deletions

View File

@ -1,3 +1,726 @@
2014-11-19 David Malcolm <dmalcolm@redhat.com>
Merger of git branch "gimple-classes-v2-option-3".
* ChangeLog.gimple-classes: New.
* coretypes.h (struct gcond): Add forward decl.
(struct gdebug): Likewise.
(struct ggoto): Likewise.
(struct glabel): Likewise.
(struct gswitch): Likewise.
(struct gassign): Likewise.
(struct gasm): Likewise.
(struct gcall): Likewise.
(struct gtransaction): Likewise.
(struct greturn): Likewise.
(struct gbind): Likewise.
(struct gcatch): Likewise.
(struct geh_filter): Likewise.
(struct geh_mnt): Likewise.
(struct geh_else): Likewise.
(struct gresx): Likewise.
(struct geh_dispatch): Likewise.
(struct gphi): Likewise.
(struct gtry): Likewise.
(struct gomp_atomic_load): Likewise.
(struct gomp_atomic_store): Likewise.
(struct gomp_continue): Likewise.
(struct gomp_critical): Likewise.
(struct gomp_for): Likewise.
(struct gomp_parallel): Likewise.
(struct gomp_task): Likewise.
(struct gomp_sections): Likewise.
(struct gomp_single): Likewise.
(struct gomp_target): Likewise.
(struct gomp_teams): Likewise.
* doc/gimple.texi (Class hierarchy of GIMPLE statements): Update
for renaming of gimple subclasses.
* gdbhooks.py: Update.
* gimple-iterator.c (gsi_for_phi): New.
(gsi_start_phis): Strengthen return type from gimple_stmt_iterator
to gphi_iterator.
* gimple-iterator.h (struct gphi_iterator): New subclass of
gimple_stmt_iterator.
(gsi_for_phi): New prototype.
(gsi_start_phis): Strengthen return type from gimple_stmt_iterator
to gphi_iterator.
(gsi_next_nonvirtual_phi): Strengthen param from
gimple_stmt_iterator * to gphi_iterator *, and local "phi" from
gimple to gphi *.
* gsstruct.def: Update for renamings of classes.
* gimple.c (gimple_build_return): Strengthen return type from
gimple to greturn *.
(gimple_call_reset_alias_info): Strengthen param to gcall *.
(gimple_build_call_1): Strengthen return type from gimple to
gcall *.
(gimple_build_call_vec): Likewise.
(gimple_build_call): Likewise.
(gimple_build_call_valist): Likewise.
(gimple_build_call_internal_1): Likewise.
(gimple_build_call_internal): Likewise.
(gimple_build_call_internal_vec): Likewise.
(gimple_build_call_from_tree): Likewise.
(gimple_build_assign_stat): Strengthen return type from gimple to
gassign *.
(gimple_build_assign_with_ops): Likewise.
(gimple_build_assign_with_ops): Likewise.
(gimple_build_cond): Strengthen return type from gimple to
gcond *.
(gimple_build_cond_from_tree): Likewise.
(gimple_cond_set_condition_from_tree): Require a gcond *.
(gimple_build_label): Strengthen return type from gimple to
glabel *.
(gimple_build_goto): Strengthen return type from gimple to
ggoto *.
(gimple_build_bind): Strengthen return type from gimple to
gbind *.
(gimple_build_asm_1): Strengthen return type from gimple to
gasm *.
(gimple_build_asm_vec): Likewise.
(gimple_build_catch): Strengthen return type from gimple to
gcatch *.
(gimple_build_eh_filter): Strengthen return type from gimple to
geh_filter *.
(gimple_build_eh_must_not_throw): Strengthen return type from
gimple to geh_mnt *.
(gimple_build_eh_else): Strengthen return type from gimple to
geh_else *.
(gimple_build_try): Update for renaming of gimple_statement_try to
gtry.
(gimple_build_resx): Strengthen return type from gimple to
gresx *.
(gimple_build_switch_nlabels): Strengthen return type from gimple
to gswitch *.
(gimple_build_switch): Likewise.
(gimple_build_eh_dispatch): Strengthen return type from gimple to
geh_dispatch *.
(gimple_build_debug_bind_stat): Strengthen return type from gimple
to gdebug *.
(gimple_build_debug_source_bind_stat): Strengthen return type from
gimple to gdebug *.
(gimple_build_omp_critical): Strengthen return type from gimple to
gomp_critical *.
(gimple_build_omp_for): Strengthen return type from gimple to
gomp_for *.
(gimple_build_omp_parallel): Strengthen return type from gimple to
gomp_parallel *.
(gimple_build_omp_task): Strengthen return type from gimple to
gomp_task *.
(gimple_build_omp_continue): Strengthen return type from gimple to
gomp_continue *.
(gimple_build_omp_sections): Strengthen return type from gimple to
gomp_sections *.
(gimple_build_omp_single): Strengthen return type from gimple to
gomp_single *.
(gimple_build_omp_target): Strengthen return type from gimple to
gomp_target *.
(gimple_build_omp_teams): Strengthen return type from gimple to
gomp_teams *.
(gimple_build_omp_atomic_load): Strengthen return type from gimple
to gomp_atomic_load *.
(gimple_build_omp_atomic_store): Strengthen return type from gimple
to gomp_atomic_store *.
(gimple_build_transaction): Strengthen return type from gimple
to gtransaction *.
(empty_stmt_p): Replace check for GIMPLE_BIND with a dyn_cast.
(gimple_call_fnspec): Require a const gcall *.
(gimple_call_arg_flags): Likewise.
(gimple_call_return_flags): Likewise.
(gimple_set_bb): Add a checked cast.
(gimple_copy): Within the cases, add locals of the appropriate
subclass and use in place of "stmt" and "copy" for typesafety.
(gimple_has_side_effects): Add a checked cast.
(gimple_could_trap_p_1): Likewise.
(gimple_call_copy_skip_args): Require a gcall *, and return one.
(gimple_asm_clobbers_memory_p): Require a const gasm *.
(infer_nonnull_range): Replace a check for GIMPLE_RETURN with a
dyn_cast, introducing local "return_stmt" and using ti in place
of "stmt".
* gimple.h (gimple_vec): Eliminate this typedef.
(struct gimple_statement_call): Rename to...
(struct gcall): ...this.
(struct gimple_statement_bind): Rename to...
(struct gbind): ...this.
(struct gimple_statement_catch): Rename to...
(struct gcatch): ...this.
(struct gimple_statement_eh_filter): Rename to...
(struct geh_filter): ...this.
(struct gimple_statement_eh_else): Rename to...
(struct geh_else): ...this.
(struct gimple_statement_eh_mnt): Rename to...
(struct geh_mnt): ...this.
(struct gimple_statement_phi): Rename to...
(struct gphi): ...this.
(struct gimple_statement_resx): Rename to...
(struct gresx): ...this.
(struct gimple_statement_eh_dispatch): Rename to...
(struct geh_dispatch): ...this.
(struct gimple_statement_try): Rename to...
(struct gtry): ...this.
(struct gimple_statement_asm): Rename to...
(struct gasm): ...this.
(struct gimple_statement_omp_critical): Rename to...
(struct gomp_critical): ...this.
(struct gimple_statement_omp_for): Rename to...
(struct gomp_for): ...this.
(struct gimple_statement_omp_parallel): Rename to...
(struct gomp_parallel): ...this.
(struct gimple_statement_omp_target): Rename to...
(struct gomp_target): ...this.
(struct gimple_statement_omp_task): Rename to...
(struct gomp_task): ...this.
(struct gimple_statement_omp_sections): Rename to...
(struct gomp_sections): ...this.
(struct gimple_statement_omp_continue): Rename to...
(struct gomp_continue): ...this.
(struct gimple_statement_omp_single): Rename to...
(struct gomp_single): ...this.
(struct gimple_statement_omp_teams): Rename to...
(struct gomp_teams): ...this.
(struct gimple_statement_omp_atomic_load): Rename to...
(struct gomp_atomic_load): ...this.
(struct gimple_statement_omp_atomic_store :): Rename to...
(struct gomp_atomic_store :): ...this.
(struct gimple_statement_transaction): Rename to...
(struct gtransaction): ...this.
(struct gcond): New subclass.
(struct gdebug): New subclass.
(struct ggoto): New subclass.
(struct glabel): New subclass.
(struct gswitch): New subclass.
(struct gassign): New subclass.
(struct greturn): New subclass.
(is_a_helper <gimple_statement_asm *>::test): Rename to...
(is_a_helper <gasm *>::test): ...this.
(is_a_helper <gimple_statement_bind *>::test): Rename to...
(is_a_helper <gbind *>::test): ...this.
(is_a_helper <gassign *>::test): New.
(is_a_helper <gimple_statement_call *>::test): Rename to...
(is_a_helper <gcall *>::test): ...this.
(is_a_helper <gimple_statement_catch *>::test): Rename to...
(is_a_helper <gcatch *>::test): ...this.
(is_a_helper <gimple_statement_resx *>::test): Rename to...
(is_a_helper <gresx *>::test): ...this.
(is_a_helper <gcond *>::test): New.
(is_a_helper <gdebug *>::test): New.
(is_a_helper <ggoto *>::test): New.
(is_a_helper <glabel *>::test): New.
(is_a_helper <gimple_statement_eh_dispatch *>::test): Rename to...
(is_a_helper <geh_dispatch *>::test): ...this.
(is_a_helper <gimple_statement_eh_else *>::test): Rename to...
(is_a_helper <geh_else *>::test): ...this.
(is_a_helper <gimple_statement_eh_filter *>::test): Rename to...
(is_a_helper <geh_filter *>::test): ...this.
(is_a_helper <gimple_statement_eh_mnt *>::test): Rename to...
(is_a_helper <geh_mnt *>::test): ...this.
(is_a_helper <gimple_statement_omp_atomic_load *>::test): Rename to...
(is_a_helper <gomp_atomic_load *>::test): ...this.
(is_a_helper <gimple_statement_omp_atomic_store *>::test): Rename to...
(is_a_helper <gomp_atomic_store *>::test): ...this.
(is_a_helper <gimple_statement_omp_continue *>::test): Rename to...
(is_a_helper <gomp_continue *>::test): ...this.
(is_a_helper <gimple_statement_omp_critical *>::test): Rename to...
(is_a_helper <gomp_critical *>::test): ...this.
(is_a_helper <gimple_statement_omp_for *>::test): Rename to...
(is_a_helper <gomp_for *>::test): ...this.
(is_a_helper <gimple_statement_omp_parallel *>::test): Rename to...
(is_a_helper <gomp_parallel *>::test): ...this.
(is_a_helper <gimple_statement_omp_target *>::test): Rename to...
(is_a_helper <gomp_target *>::test): ...this.
(is_a_helper <gimple_statement_omp_sections *>::test): Rename to...
(is_a_helper <gomp_sections *>::test): ...this.
(is_a_helper <gimple_statement_omp_single *>::test): Rename to...
(is_a_helper <gomp_single *>::test): ...this.
(is_a_helper <gimple_statement_omp_teams *>::test): Rename to...
(is_a_helper <gomp_teams *>::test): ...this.
(is_a_helper <gimple_statement_omp_task *>::test): Rename to...
(is_a_helper <gomp_task *>::test): ...this.
(is_a_helper <gimple_statement_phi *>::test): Rename to...
(is_a_helper <gphi *>::test): ...this.
(is_a_helper <gimple_statement_transaction *>::test): Rename to...
(is_a_helper <gtransaction *>::test): ...this.
(is_a_helper <greturn *>::test): New.
(is_a_helper <gswitch *>::test): New.
(is_a_helper <gimple_statement_try *>::test): Rename to...
(is_a_helper <gtry *>::test): ...this.
(is_a_helper <const gimple_statement_asm *>::test): Rename to...
(is_a_helper <const gasm *>::test): ...this.
(is_a_helper <const gimple_statement_bind *>::test): Rename to...
(is_a_helper <const gbind *>::test): ...this.
(is_a_helper <const gimple_statement_call *>::test): Rename to...
(is_a_helper <const gcall *>::test): ...this.
(is_a_helper <const gimple_statement_catch *>::test): Rename to...
(is_a_helper <const gcatch *>::test): ...this.
(is_a_helper <const gimple_statement_resx *>::test): Rename to...
(is_a_helper <const gresx *>::test): ...this.
(is_a_helper <const gimple_statement_eh_dispatch *>::test): Rename to...
(is_a_helper <const geh_dispatch *>::test): ...this.
(is_a_helper <const gimple_statement_eh_filter *>::test): Rename to...
(is_a_helper <const geh_filter *>::test): ...this.
(is_a_helper <const gimple_statement_omp_atomic_load *>::test):
Rename to...
(is_a_helper <const gomp_atomic_load *>::test): ...this.
(is_a_helper <const gimple_statement_omp_atomic_store *>::test):
Rename to...
(is_a_helper <const gomp_atomic_store *>::test): ...this.
(is_a_helper <const gimple_statement_omp_continue *>::test):
Rename to...
(is_a_helper <const gomp_continue *>::test): ...this.
(is_a_helper <const gimple_statement_omp_critical *>::test):
Rename to...
(is_a_helper <const gomp_critical *>::test): ...this.
(is_a_helper <const gimple_statement_omp_for *>::test): Rename to...
(is_a_helper <const gomp_for *>::test): ...this.
(is_a_helper <const gimple_statement_omp_parallel *>::test):
Rename to...
(is_a_helper <const gomp_parallel *>::test): ...this.
(is_a_helper <const gimple_statement_omp_target *>::test): Rename to...
(is_a_helper <const gomp_target *>::test): ...this.
(is_a_helper <const gimple_statement_omp_sections *>::test):
Rename to...
(is_a_helper <const gomp_sections *>::test): ...this.
(is_a_helper <const gimple_statement_omp_single *>::test): Rename to...
(is_a_helper <const gomp_single *>::test): ...this.
(is_a_helper <const gimple_statement_omp_teams *>::test): Rename to...
(is_a_helper <const gomp_teams *>::test): ...this.
(is_a_helper <const gimple_statement_omp_task *>::test): Rename to...
(is_a_helper <const gomp_task *>::test): ...this.
(is_a_helper <const gimple_statement_phi *>::test): Rename to...
(is_a_helper <const gphi *>::test): ...this.
(is_a_helper <const gimple_statement_transaction *>::test): Rename to...
(is_a_helper <const gtransaction *>::test): ...this.
(gimple_build_return): Strengthen return type to greturn *.
(gimple_call_reset_alias_info): Require a gcall *.
(gimple_build_call_vec): Return a gcall *.
(gimple_build_call): Likewise.
(gimple_build_call_valist): Likewise.
(gimple_build_call_internal): Likewise.
(gimple_build_call_internal_vec): Likewise.
(gimple_build_call_from_tree): Likewise.
(gimple_build_assign_stat): Return a gassign *.
(gimple_build_assign_with_ops): Likewise.
(gimple_build_cond): Return a gcond *.
(gimple_build_cond_from_tree): Likewise.
(gimple_cond_set_condition_from_tree): Require a gcond *.
(gimple_build_label): Return a glabel *.
(gimple_build_goto): Return a ggoto *.
(gimple_build_bind): Return a gbind *.
(gimple_build_asm_vec): Return a gasm *.
(gimple_build_catch): Return a gcatch *.
(gimple_build_eh_filter): Return a geh_filter *.
(gimple_build_eh_must_not_throw): Return a geh_mnt *.
(gimple_build_eh_else): Return a geh_else *.
(gimple_build_try): Return a gtry *.
(gimple_build_resx): Return a gresx *.
(gimple_build_switch_nlabels): Return a gswitch *.
(gimple_build_switch): Return a gswitch *.
(gimple_build_eh_dispatch): Return a geh_dispatch *.
(gimple_build_debug_bind_stat): Return a gdebug *.
(gimple_build_debug_source_bind_stat): Return a gdebug *.
(gimple_build_omp_critical): Return a gomp_critical *.
(gimple_build_omp_for): Return a gomp_for *.
(gimple_build_omp_parallel): Return a gomp_parallel *.
(gimple_build_omp_task): Return a gomp_task *.
(gimple_build_omp_continue): Return a gomp_continue *.
(gimple_build_omp_sections): Return a gomp_sections *.
(gimple_build_omp_single): Return a gomp_single *.
(gimple_build_omp_target): Return a gomp_target *.
(gimple_build_omp_teams): Return a gomp_teams *.
(gimple_build_omp_atomic_load): Return a gomp_atomic_load *.
(gimple_build_omp_atomic_store): Return a gomp_atomic_store *.
(gimple_build_transaction): Return a gtransaction *.
(gimple_call_arg_flags): Require a const gcall *.
(gimple_call_return_flags): Likewise.
(gimple_call_copy_skip_args): Require and return a gcall *.
(gimple_asm_clobbers_memory_p): Require a const gasm *.
(gimple_seq_first_stmt_as_a_bind): New.
(gimple_assign_nontemporal_move_p): Require a const gassign *
rather than a const_gimple.
(gimple_call_internal_fn): Update for renaming to gcall.
(gimple_call_fntype): Likewise.
(gimple_call_set_fntype): Require a gcall * rather than a gimple.
(gimple_call_set_fn): Likewise.
(gimple_call_set_internal_fn): Likewise.
(gimple_call_set_chain): Likewise.
(gimple_call_set_tail): Likewise.
(gimple_call_tail_p): Likewise.
(gimple_call_set_return_slot_opt): Likewise.
(gimple_call_return_slot_opt_p): Likewise.
(gimple_call_set_from_thunk): Likewise.
(gimple_call_from_thunk_p): Likewise.
(gimple_call_set_va_arg_pack): Likewise.
(gimple_call_va_arg_pack_p): Likewise.
(gimple_call_set_nothrow): Likewise.
(gimple_call_nothrow_p): Likewise.
(gimple_call_set_alloca_for_var): Likewise.
(gimple_call_alloca_for_var_p): Likewise.
(gimple_call_use_set): Likewise.
(gimple_call_clobber_set): Likewise.
(gimple_call_return_type): Require a const gcall * rather than a
const_gimple.
(gimple_call_chain_ptr): Likewise.
(gimple_call_copy_flags): Require a pair of gcall *.
(gimple_cond_set_code): Require a gcond * rather than a gimple
(gimple_cond_set_lhs): Likewise.
(gimple_cond_set_rhs): Likewise.
(gimple_cond_set_true_label): Likewise.
(gimple_cond_set_false_label): Likewise.
(gimple_cond_make_false): Likewise.
(gimple_cond_make_true): Likewise.
(gimple_cond_lhs_ptr): Require a const gcond * rather than a
const_gimple.
(gimple_cond_rhs_ptr): Likewise.
(gimple_cond_true_label): Likewise.
(gimple_cond_false_label): Likewise.
(gimple_cond_true_p): Likewise.
(gimple_cond_false_p): Likewise.
(gimple_cond_set_condition): Likewise.
(gimple_label_label): Require a const glabel *.
(gimple_label_set_label): Require a glabel *.
(gimple_goto_set_dest): Require a ggoto *.
(gimple_bind_vars): Require a const gbind *.
(gimple_bind_block): Likewise.
(gimple_bind_set_vars): Require a gbind *.
(gimple_bind_append_vars): Likewise.
(gimple_bind_body_ptr): Likewise.
(gimple_bind_body): Likewise.
(gimple_bind_set_body): Likewise.
(gimple_bind_add_stmt): Likewise.
(gimple_bind_add_seq): Likewise.
(gimple_bind_set_block): Likewise.
(gimple_asm_ninputs): Require a const gasm *.
(gimple_asm_noutputs): Likewise.
(gimple_asm_nclobbers): Likewise.
(gimple_asm_nlabels): Likewise.
(gimple_asm_input_op): Likewise.
(gimple_asm_input_op_ptr): Likewise.
(gimple_asm_output_op): Likewise.
(gimple_asm_output_op_ptr): Likewise.
(gimple_asm_clobber_op): Likewise.
(gimple_asm_label_op): Likewise.
(gimple_asm_string): Likewise.
(gimple_asm_volatile_p): Likewise.
(gimple_asm_input_p): Likewise.
(gimple_asm_set_input_op): Require a gasm *.
(gimple_asm_set_output_op): Likewise.
(gimple_asm_set_clobber_op): Likewise.
(gimple_asm_set_label_op): Likewise.
(gimple_asm_set_volatile): Likewise.
(gimple_asm_set_input): Likewise.
(gimple_catch_types): Require a const gcatch *.
(gimple_catch_types_ptr): Require a gcatch *.
(gimple_catch_handler_ptr): Likewise.
(gimple_catch_handler): Likewise.
(gimple_catch_set_types): Likewise.
(gimple_catch_set_handler): Likewise.
(gimple_eh_filter_types): Update for renaming of subclass to
geh_filter.
(gimple_eh_filter_types_ptr): Likewise.
(gimple_eh_filter_failure_ptr): Likewise.
(gimple_eh_filter_set_types): Require a geh_filter *.
(gimple_eh_filter_set_failure): Likewise.
(gimple_eh_must_not_throw_fndecl): Require a geh_mnt *.
(gimple_eh_must_not_throw_set_fndecl): Likewise.
(gimple_eh_else_n_body_ptr): Require a geh_else *.
(gimple_eh_else_n_body): Likewise.
(gimple_eh_else_e_body_ptr): Likewise.
(gimple_eh_else_e_body): Likewise.
(gimple_eh_else_set_n_body): Likewise.
(gimple_eh_else_set_e_body): Likewise.
(gimple_try_set_kind): Require a gtry *.
(gimple_try_set_catch_is_cleanup): Likewise.
(gimple_try_set_eval): Likewise.
(gimple_try_set_cleanup): Likewise.
(gimple_try_eval_ptr): Update for renaming of subclass to gtry.
(gimple_try_cleanup_ptr): Likewise.
(gimple_phi_capacity): Update for renaming of subclass to gphi.
(gimple_phi_num_args): Likewise.
(gimple_phi_result): Likewise.
(gimple_phi_result_ptr): Likewise.
(gimple_phi_arg): Likewise.
(gimple_phi_set_result): Require a gphi *.
(gimple_phi_set_arg): Likewise.
(gimple_phi_arg_def_ptr): Likewise.
(gimple_phi_arg_edge): Likewise.
(gimple_phi_arg_location): Likewise.
(gimple_phi_arg_location_from_edge): Likewise.
(gimple_phi_arg_set_location): Likewise.
(gimple_phi_arg_has_location): Likewise.
(gimple_resx_region): Require a const gresx *.
(gimple_resx_set_region): Require a gresx *.
(gimple_eh_dispatch_region): Require a const geh_dispatch *.
(gimple_eh_dispatch_set_region): Require a geh_dispatch *.
(gimple_switch_num_labels): Require a const gswitch *.
(gimple_switch_set_num_labels): Likewise.
(gimple_switch_index): Likewise.
(gimple_switch_index_ptr): Likewise.
(gimple_switch_label): Likewise.
(gimple_switch_default_label): Likewise.
(gimple_switch_set_index): Require a gswitch *.
(gimple_switch_set_label): Likewise.
(gimple_switch_set_default_label): Likewise.
(gimple_omp_critical_name): Require a const gomp_critical *.
(gimple_omp_critical_name_ptr): Require a gomp_critical *.
(gimple_omp_critical_set_name): Likewise.
(gimple_omp_for_set_kind): Require a gomp_for *.
(gimple_omp_for_set_combined_p): Likewise.
(gimple_omp_for_set_combined_into_p): Likewise.
(gimple_omp_for_clauses): Update for renaming of subclass to
gomp_for.
(gimple_omp_for_clauses_ptr): Likewise.
(gimple_omp_for_set_clauses): Likewise.
(gimple_omp_for_collapse): Likewise.
(gimple_omp_for_index): Likewise.
(gimple_omp_for_index_ptr): Likewise.
(gimple_omp_for_set_index): Likewise.
(gimple_omp_for_initial): Likewise.
(gimple_omp_for_initial_ptr): Likewise.
(gimple_omp_for_set_initial): Likewise.
(gimple_omp_for_final): Likewise.
(gimple_omp_for_final_ptr): Likewise.
(gimple_omp_for_set_final): Likewise.
(gimple_omp_for_incr): Likewise.
(gimple_omp_for_incr_ptr): Likewise.
(gimple_omp_for_set_incr): Likewise.
(gimple_omp_for_pre_body): Likewise.
(gimple_omp_for_set_pre_body): Likewise.
(gimple_omp_parallel_clauses): Update for renaming of subclass to
gomp_parallel.
(gimple_omp_parallel_clauses_ptr): Require a gomp_parallel *.
(gimple_omp_parallel_set_clauses): Likewise.
(gimple_omp_parallel_child_fn_ptr): Likewise.
(gimple_omp_parallel_set_child_fn): Likewise.
(gimple_omp_parallel_data_arg_ptr): Likewise.
(gimple_omp_parallel_set_data_arg): Likewise.
(gimple_omp_parallel_child_fn): Require a const gomp_parallel *.
(gimple_omp_parallel_data_arg): Likewise.
(gimple_omp_task_clauses): Update for renaming of subclass to
gomp_task.
(gimple_omp_task_clauses_ptr): Likewise.
(gimple_omp_task_set_clauses): Likewise.
(gimple_omp_task_child_fn): Likewise.
(gimple_omp_task_child_fn_ptr): Likewise.
(gimple_omp_task_set_child_fn): Likewise.
(gimple_omp_task_data_arg): Likewise.
(gimple_omp_task_data_arg_ptr): Likewise.
(gimple_omp_task_set_data_arg): Likewise.
(gimple_omp_taskreg_clauses): Whitespace fixes.
(gimple_omp_taskreg_clauses_ptr): Likewise.
(gimple_omp_taskreg_set_clauses): Likewise.
(gimple_omp_taskreg_child_fn): Likewise.
(gimple_omp_taskreg_child_fn_ptr): Likewise.
(gimple_omp_taskreg_set_child_fn): Likewise.
(gimple_omp_taskreg_data_arg): Likewise.
(gimple_omp_taskreg_data_arg_ptr): Likewise.
(gimple_omp_taskreg_set_data_arg): Likewise.
(gimple_omp_task_copy_fn): Update for renaming of subclass to
gomp_task.
(gimple_omp_task_copy_fn_ptr): Likewise.
(gimple_omp_task_set_copy_fn): Likewise.
(gimple_omp_task_arg_size): Likewise.
(gimple_omp_task_arg_size_ptr): Likewise.
(gimple_omp_task_set_arg_size): Likewise.
(gimple_omp_task_arg_align): Likewise.
(gimple_omp_task_arg_align_ptr): Likewise.
(gimple_omp_task_set_arg_align): Likewise.
(gimple_omp_single_clauses): Update for renaming of subclass to
gomp_single.
(gimple_omp_single_clauses_ptr): Likewise.
(gimple_omp_single_set_clauses): Likewise.
(gimple_omp_target_clauses): Update for renaming of subclass to
gomp_target.
(gimple_omp_target_clauses_ptr): Likewise.
(gimple_omp_target_set_clauses): Require a gomp_target *.
(gimple_omp_target_set_kind): Likewise.
(gimple_omp_target_child_fn_ptr): Likewise.
(gimple_omp_target_set_child_fn): Likewise.
(gimple_omp_target_data_arg_ptr): Likewise.
(gimple_omp_target_set_data_arg): Likewise.
(gimple_omp_target_child_fn): Require a const gomp_target *.
(gimple_omp_target_data_arg): Likewise.
(gimple_omp_teams_clauses): Update for renaming of subclass to
gomp_teams.
(gimple_omp_teams_clauses_ptr): Likewise.
(gimple_omp_teams_set_clauses): Require a gomp_teams *.
(gimple_omp_sections_clauses): Update for renaming of subclass to
gomp_sections.
(gimple_omp_sections_clauses_ptr): Likewise.
(gimple_omp_sections_set_clauses): Likewise.
(gimple_omp_sections_control): Likewise.
(gimple_omp_sections_control_ptr): Likewise.
(gimple_omp_sections_set_control): Likewise.
(gimple_omp_for_set_cond): Likewise.
(gimple_omp_for_cond): Likewise.
(gimple_omp_atomic_store_set_val): Require a gomp_atomic_store *.
(gimple_omp_atomic_store_val_ptr): Likewise.
(gimple_omp_atomic_load_set_lhs): Likewise.
(gimple_omp_atomic_store_val): Require a const gomp_atomic_store *.
(gimple_omp_atomic_load_lhs): Likewise.
(gimple_omp_atomic_load_rhs): Likewise.
(gimple_omp_atomic_load_lhs_ptr): Require a gomp_atomic_load *.
(gimple_omp_atomic_load_set_rhs): Likewise.
(gimple_omp_atomic_load_rhs_ptr): Likewise.
(gimple_omp_continue_control_def): Require a const gomp_continue *.
(gimple_omp_continue_control_use): Likewise.
(gimple_omp_continue_control_def_ptr): Require a gomp_continue *.
(gimple_omp_continue_set_control_def): Likewise.
(gimple_omp_continue_control_use_ptr): Likewise.
(gimple_omp_continue_set_control_use): Likewise.
(gimple_transaction_body_ptr): Require a gtransaction *.
(gimple_transaction_body): Likewise.
(gimple_transaction_label_ptr): Likewise.
(gimple_transaction_label): Require a const gtransaction *.
(gimple_transaction_subcode): Likewise.
(gimple_transaction_set_body): Require a gtransaction *.
(gimple_transaction_set_label): Likewise.
(gimple_transaction_set_subcode): Likewise.
(gimple_return_retval_ptr): Require a const greturn *.
(gimple_return_retval): Likewise.
(gimple_return_set_retval): Require a greturn *.
(gimple_expr_type): Introduce local "call_stmt" and use in place of
"stmt" for typesafety.
* asan.c: Use gimple subclasses.
* auto-profile.c: Likewise.
* builtins.c: Likewise.
* builtins.h: Likewise.
* cfgexpand.c: Likewise.
* cfgloop.c: Likewise.
* cfgloopmanip.c: Likewise.
* cgraph.c: Likewise.
* cgraph.h: Likewise.
* cgraphbuild.c: Likewise.
* cgraphclones.c: Likewise.
* cgraphunit.c: Likewise.
* expr.h: Likewise.
* gimple-builder.c: Likewise.
* gimple-builder.h: Likewise.
* gimple-fold.c: Likewise.
* gimple-low.c: Likewise.
* gimple-pretty-print.c: Likewise.
* gimple-ssa-isolate-paths.c: Likewise.
* gimple-ssa-strength-reduction.c: Likewise.
* gimple-streamer-in.c: Likewise.
* gimple-streamer-out.c: Likewise.
* gimple-walk.c: Likewise.
* gimplify-me.c: Likewise.
* gimplify.c: Likewise.
* gimplify.h: Likewise.
* graphite-scop-detection.c: Likewise.
* graphite-sese-to-poly.c: Likewise.
* internal-fn.c: Likewise.
* internal-fn.def:: Likewise.
* internal-fn.h: Likewise.
* ipa-icf-gimple.c: Likewise.
* ipa-icf-gimple.h: Likewise.
* ipa-icf.c: Likewise.
* ipa-inline-analysis.c: Likewise.
* ipa-prop.c: Likewise.
* ipa-prop.h: Likewise.
* ipa-pure-const.c: Likewise.
* ipa-split.c: Likewise.
* lto-streamer-in.c: Likewise.
* lto-streamer-out.c: Likewise.
* omp-low.c: Likewise.
* predict.c: Likewise.
* sanopt.c: Likewise.
* sese.c: Likewise.
* ssa-iterators.h: Likewise.
* stmt.c: Likewise.
* trans-mem.c: Likewise.
* tree-call-cdce.c: Likewise.
* tree-cfg.c: Likewise.
* tree-cfg.h: Likewise.
* tree-cfgcleanup.c: Likewise.
* tree-chkp.c: Likewise.
* tree-chkp.h: Likewise.
* tree-complex.c: Likewise.
* tree-data-ref.c: Likewise.
* tree-dfa.c: Likewise.
* tree-eh.c: Likewise.
* tree-eh.h: Likewise.
* tree-emutls.c: Likewise.
* tree-if-conv.c: Likewise.
* tree-inline.c: Likewise.
* tree-inline.h: Likewise.
* tree-into-ssa.c: Likewise.
* tree-into-ssa.h: Likewise.
* tree-loop-distribution.c: Likewise.
* tree-nrv.c: Likewise.
* tree-object-size.c: Likewise.
* tree-outof-ssa.c: Likewise.
* tree-parloops.c: Likewise.
* tree-phinodes.c: Likewise.
* tree-phinodes.h: Likewise.
* tree-predcom.c: Likewise.
* tree-profile.c: Likewise.
* tree-scalar-evolution.c: Likewise.
* tree-scalar-evolution.h: Likewise.
* tree-sra.c: Likewise.
* tree-ssa-alias.c: Likewise.
* tree-ssa-alias.h: Likewise.
* tree-ssa-ccp.c: Likewise.
* tree-ssa-coalesce.c: Likewise.
* tree-ssa-copy.c: Likewise.
* tree-ssa-copyrename.c: Likewise.
* tree-ssa-dce.c: Likewise.
* tree-ssa-dom.c: Likewise.
* tree-ssa-forwprop.c: Likewise.
* tree-ssa-ifcombine.c: Likewise.
* tree-ssa-live.c: Likewise.
* tree-ssa-loop-im.c: Likewise.
* tree-ssa-loop-ivcanon.c: Likewise.
* tree-ssa-loop-ivopts.c: Likewise.
* tree-ssa-loop-manip.c: Likewise.
* tree-ssa-loop-niter.c: Likewise.
* tree-ssa-loop-prefetch.c: Likewise.
* tree-ssa-loop-unswitch.c: Likewise.
* tree-ssa-math-opts.c: Likewise.
* tree-ssa-operands.c: Likewise.
* tree-ssa-phiopt.c: Likewise.
* tree-ssa-phiprop.c: Likewise.
* tree-ssa-pre.c: Likewise.
* tree-ssa-propagate.c: Likewise.
* tree-ssa-propagate.h: Likewise.
* tree-ssa-reassoc.c: Likewise.
* tree-ssa-sccvn.c: Likewise.
* tree-ssa-sccvn.h: Likewise.
* tree-ssa-sink.c: Likewise.
* tree-ssa-strlen.c: Likewise.
* tree-ssa-structalias.c: Likewise.
* tree-ssa-tail-merge.c: Likewise.
* tree-ssa-ter.c: Likewise.
* tree-ssa-threadedge.c: Likewise.
* tree-ssa-threadedge.h: Likewise.
* tree-ssa-threadupdate.c: Likewise.
* tree-ssa-uncprop.c: Likewise.
* tree-ssa-uninit.c: Likewise.
* tree-ssa.c: Likewise.
* tree-stdarg.c: Likewise.
* tree-switch-conversion.c: Likewise.
* tree-tailcall.c: Likewise.
* tree-vect-data-refs.c: Likewise.
* tree-vect-generic.c: Likewise.
* tree-vect-loop-manip.c: Likewise.
* tree-vect-loop.c: Likewise.
* tree-vect-patterns.c: Likewise.
* tree-vect-slp.c: Likewise.
* tree-vect-stmts.c: Likewise.
* tree-vectorizer.h: Likewise.
* tree-vrp.c: Likewise.
* tree.c: Likewise.
* ubsan.c: Likewise.
* value-prof.c: Likewise.
* value-prof.h: Likewise.
* vtable-verify.c: Likewise.

2014-11-19 Markus Trippelsdorf <markus@trippelsdorf.de>

* config/rs6000/constraints.md: Avoid signed integer overflows.

gcc/ChangeLog.gimple-classes (new file, 4735 lines): diff suppressed because it is too large.

gcc/asan.c:

@ -493,7 +493,7 @@ has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
otherwise. */
static bool
get_mem_ref_of_assignment (const gimple assignment,
get_mem_ref_of_assignment (const gassign *assignment,
asan_mem_ref *ref,
bool *ref_is_store)
{
@ -521,7 +521,7 @@ get_mem_ref_of_assignment (const gimple assignment,
representing a builtin call that has to do with memory access. */
static bool
get_mem_refs_of_builtin_call (const gimple call,
get_mem_refs_of_builtin_call (const gcall *call,
asan_mem_ref *src0,
tree *src0_len,
bool *src0_is_store,
@ -864,7 +864,8 @@ has_stmt_been_instrumented_p (gimple stmt)
asan_mem_ref r;
asan_mem_ref_init (&r, NULL, 1);
if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
&r_is_store))
return has_mem_ref_been_instrumented (&r);
}
else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
@ -877,7 +878,7 @@ has_stmt_been_instrumented_p (gimple stmt)
tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
bool src0_is_store = false, src1_is_store = false,
dest_is_store = false, dest_is_deref = false, intercepted_p = true;
if (get_mem_refs_of_builtin_call (stmt,
if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
&src0, &src0_len, &src0_is_store,
&src1, &src1_len, &src1_is_store,
&dest, &dest_len, &dest_is_store,
@ -1556,7 +1557,7 @@ create_cond_insert_point (gimple_stmt_iterator *iter,
pointing to initially. */
static void
insert_if_then_before_iter (gimple cond,
insert_if_then_before_iter (gcond *cond,
gimple_stmt_iterator *iter,
bool then_more_likely_p,
basic_block *then_bb,
@ -1901,7 +1902,7 @@ instrument_builtin_call (gimple_stmt_iterator *iter)
return false;
bool iter_advanced_p = false;
gimple call = gsi_stmt (*iter);
gcall *call = as_a <gcall *> (gsi_stmt (*iter));
gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
@ -2586,8 +2587,9 @@ asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
gimple_set_location (g, loc);
basic_block then_bb, fallthrough_bb;
insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
&then_bb, &fallthrough_bb);
insert_if_then_before_iter (as_a <gcond *> (g), iter,
/*then_more_likely_p=*/true,
&then_bb, &fallthrough_bb);
/* Note that fallthrough_bb starts with the statement that was
pointed to by ITER. */
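
As an aside (a minimal sketch, not part of the patch): the asan.c hunks above rely on the checked-cast helpers, where as_a <gassign *> is applied only after the statement kind has already been established and, in checked builds, asserts that the downcast is valid. The wrapper name below is invented for illustration.

static bool
maybe_record_assignment_ref (gimple stmt, asan_mem_ref *ref, bool *ref_is_store)
{
  /* gimple_assign_single_p guarantees STMT is a GIMPLE_ASSIGN, so the
     as_a <gassign *> downcast below cannot fail.  */
  if (!gimple_assign_single_p (stmt))
    return false;
  return get_mem_ref_of_assignment (as_a <gassign *> (stmt), ref, ref_is_store);
}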

gcc/auto-profile.c:

@ -243,7 +243,7 @@ public:
/* Read the inlined indirect call target profile for STMT and store it in
MAP, return the total count for all inlined indirect calls. */
gcov_type find_icall_target_map (gimple stmt, icall_target_map *map) const;
gcov_type find_icall_target_map (gcall *stmt, icall_target_map *map) const;
/* Sum of counts that is used during annotation. */
gcov_type total_annotated_count () const;
@ -311,7 +311,7 @@ public:
/* Update value profile INFO for STMT from the inlined indirect callsite.
Return true if INFO is updated. */
bool update_inlined_ind_target (gimple stmt, count_info *info);
bool update_inlined_ind_target (gcall *stmt, count_info *info);
/* Mark LOC as annotated. */
void mark_annotated (location_t loc);
@ -600,7 +600,7 @@ function_instance::mark_annotated (location_t loc)
MAP, return the total count for all inlined indirect calls. */
gcov_type
function_instance::find_icall_target_map (gimple stmt,
function_instance::find_icall_target_map (gcall *stmt,
icall_target_map *map) const
{
gcov_type ret = 0;
@ -769,7 +769,7 @@ autofdo_source_profile::mark_annotated (location_t loc)
Return true if INFO is updated. */
bool
autofdo_source_profile::update_inlined_ind_target (gimple stmt,
autofdo_source_profile::update_inlined_ind_target (gcall *stmt,
count_info *info)
{
if (LOCATION_LOCUS (gimple_location (stmt)) == cfun->function_end_locus)
@ -963,11 +963,13 @@ static void
afdo_indirect_call (gimple_stmt_iterator *gsi, const icall_target_map &map,
bool transform)
{
gimple stmt = gsi_stmt (*gsi);
gimple gs = gsi_stmt (*gsi);
tree callee;
if (map.size () == 0 || gimple_code (stmt) != GIMPLE_CALL
|| gimple_call_fndecl (stmt) != NULL_TREE)
if (map.size () == 0)
return;
gcall *stmt = dyn_cast <gcall *> (gs);
if ((!stmt) || gimple_call_fndecl (stmt) != NULL_TREE)
return;
callee = gimple_call_fn (stmt);
@ -1085,9 +1087,11 @@ afdo_set_bb_count (basic_block bb, const stmt_set &promoted)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
afdo_source_profile->mark_annotated (gimple_location (gsi_stmt (gsi)));
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gpi = gsi_start_phis (bb);
!gsi_end_p (gpi);
gsi_next (&gpi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gpi.phi ();
size_t i;
for (i = 0; i < gimple_phi_num_args (phi); i++)
afdo_source_profile->mark_annotated (gimple_phi_arg_location (phi, i));
@ -1245,7 +1249,7 @@ afdo_propagate_circuit (const bb_set &annotated_bb, edge_set *annotated_edge)
basic_block bb;
FOR_ALL_BB_FN (bb, cfun)
{
gimple phi_stmt;
gimple def_stmt;
tree cmp_rhs, cmp_lhs;
gimple cmp_stmt = last_stmt (bb);
edge e;
@ -1262,12 +1266,15 @@ afdo_propagate_circuit (const bb_set &annotated_bb, edge_set *annotated_edge)
continue;
if (!is_bb_annotated (bb, annotated_bb))
continue;
phi_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
while (phi_stmt && gimple_code (phi_stmt) == GIMPLE_ASSIGN
&& gimple_assign_single_p (phi_stmt)
&& TREE_CODE (gimple_assign_rhs1 (phi_stmt)) == SSA_NAME)
phi_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (phi_stmt));
if (!phi_stmt || gimple_code (phi_stmt) != GIMPLE_PHI)
def_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
while (def_stmt && gimple_code (def_stmt) == GIMPLE_ASSIGN
&& gimple_assign_single_p (def_stmt)
&& TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
if (!def_stmt)
continue;
gphi *phi_stmt = dyn_cast <gphi *> (def_stmt);
if (!phi_stmt)
continue;
FOR_EACH_EDGE (e, ei, bb->succs)
{
@ -1426,11 +1433,11 @@ afdo_vpt_for_early_inline (stmt_set *promoted_stmts)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
gcall *stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
/* IC_promotion and early_inline_2 is done in multiple iterations.
No need to promoted the stmt if its in promoted_stmts (means
it is already been promoted in the previous iterations). */
if (gimple_code (stmt) != GIMPLE_CALL || gimple_call_fn (stmt) == NULL
if ((!stmt) || gimple_call_fn (stmt) == NULL
|| TREE_CODE (gimple_call_fn (stmt)) == FUNCTION_DECL
|| promoted_stmts->find (stmt) != promoted_stmts->end ())
continue;
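
A sketch (illustrative only, with an invented function name) of the dyn_cast idiom that replaces the explicit gimple_code () tests in the auto-profile.c hunks above: dyn_cast <gcall *> returns NULL when the statement is not a call, so the kind check and the downcast become a single step.

static tree
call_fndecl_or_null (gimple gs)
{
  /* A NULL result from dyn_cast means GS is not a GIMPLE_CALL.  */
  if (gcall *call_stmt = dyn_cast <gcall *> (gs))
    return gimple_call_fndecl (call_stmt);
  return NULL_TREE;
}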

gcc/builtins.c:

@ -10953,7 +10953,7 @@ validate_arg (const_tree arg, enum tree_code code)
validate_arglist will then be removed. */
bool
validate_gimple_arglist (const_gimple call, ...)
validate_gimple_arglist (const gcall *call, ...)
{
enum tree_code code;
bool res = 0;
@ -12811,7 +12811,7 @@ do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
call node earlier than the warning is generated. */
tree
fold_call_stmt (gimple stmt, bool ignore)
fold_call_stmt (gcall *stmt, bool ignore)
{
tree ret = NULL_TREE;
tree fndecl = gimple_call_fndecl (stmt);

gcc/builtins.h:

@ -77,11 +77,11 @@ extern bool avoid_folding_inline_builtin (tree);
extern tree fold_call_expr (location_t, tree, bool);
extern tree fold_builtin_call_array (location_t, tree, tree, int, tree *);
extern tree fold_builtin_n (location_t, tree, tree *, int, bool);
extern bool validate_gimple_arglist (const_gimple, ...);
extern bool validate_gimple_arglist (const gcall *, ...);
extern rtx default_expand_builtin (tree, rtx, rtx, machine_mode, int);
extern bool fold_builtin_next_arg (tree, bool);
extern tree do_mpc_arg2 (tree, tree, tree, int, int (*)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t));
extern tree fold_call_stmt (gimple, bool);
extern tree fold_call_stmt (gcall *, bool);
extern void set_builtin_user_assembler_name (tree decl, const char *asmspec);
extern bool is_simple_builtin (tree);
extern bool is_inexpensive_builtin (tree);

gcc/c-family/ChangeLog:

@ -1,3 +1,10 @@
2014-11-19 David Malcolm <dmalcolm@redhat.com>
Merger of git branch "gimple-classes-v2-option-3".
* ChangeLog.gimple-classes: New.
* c-gimplify.c (add_block_to_enclosing): Strengthen local "stack"
from being just a vec<gimple> to a vec<gbind *>.
2014-11-18 Jakub Jelinek <jakub@redhat.com>
PR sanitizer/63813

gcc/c-family/ChangeLog.gimple-classes:

@ -0,0 +1,20 @@
2014-10-27 David Malcolm <dmalcolm@redhat.com>
Patch autogenerated by rename_gimple_subclasses.py from
https://github.com/davidmalcolm/gcc-refactoring-scripts
revision 7d754b63ff2bf47226a67b2c0af5d74b54d4709f
* c-gimplify.c (add_block_to_enclosing): Rename gimple subclass types.
2014-10-24 David Malcolm <dmalcolm@redhat.com>
Introduce gimple_bind and use it for accessors.
* c-gimplify.c (add_block_to_enclosing): Strengthen local "stack"
from being just a vec<gimple> to a vec<gimple_bind>.
Copyright (C) 2014 Free Software Foundation, Inc.
Copying and distribution of this file, with or without modification,
are permitted in any medium without royalty provided the copyright
notice and this notice are preserved.

gcc/c-family/c-gimplify.c:

@ -175,8 +175,8 @@ add_block_to_enclosing (tree block)
{
unsigned i;
tree enclosing;
gimple bind;
vec<gimple> stack = gimple_bind_expr_stack ();
gbind *bind;
vec<gbind *> stack = gimple_bind_expr_stack ();
FOR_EACH_VEC_ELT (stack, i, bind)
if (gimple_bind_block (bind))

gcc/cfgexpand.c:

@ -2001,7 +2001,6 @@ label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
gimple_stmt_iterator gsi;
tree lab;
gimple lab_stmt;
if (bb->flags & BB_RTL)
return block_label (bb);
@ -2014,8 +2013,10 @@ label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
lab_stmt = gsi_stmt (gsi);
if (gimple_code (lab_stmt) != GIMPLE_LABEL)
glabel *lab_stmt;
lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
if (!lab_stmt)
break;
lab = gimple_label_label (lab_stmt);
@ -2088,7 +2089,7 @@ maybe_cleanup_end_of_block (edge e, rtx_insn *last)
block and created a new one. */
static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
expand_gimple_cond (basic_block bb, gcond *stmt)
{
basic_block new_bb, dest;
edge new_edge;
@ -2260,7 +2261,7 @@ mark_transaction_restart_calls (gimple stmt)
statement STMT. */
static void
expand_call_stmt (gimple stmt)
expand_call_stmt (gcall *stmt)
{
tree exp, decl, lhs;
bool builtin_p;
@ -2947,7 +2948,7 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
static void
expand_asm_stmt (gimple stmt)
expand_asm_stmt (gasm *stmt)
{
int noutputs;
tree outputs, tail, t;
@ -3274,23 +3275,23 @@ expand_gimple_stmt_1 (gimple stmt)
expand_computed_goto (op0);
break;
case GIMPLE_LABEL:
expand_label (gimple_label_label (stmt));
expand_label (gimple_label_label (as_a <glabel *> (stmt)));
break;
case GIMPLE_NOP:
case GIMPLE_PREDICT:
break;
case GIMPLE_SWITCH:
expand_case (stmt);
expand_case (as_a <gswitch *> (stmt));
break;
case GIMPLE_ASM:
expand_asm_stmt (stmt);
expand_asm_stmt (as_a <gasm *> (stmt));
break;
case GIMPLE_CALL:
expand_call_stmt (stmt);
expand_call_stmt (as_a <gcall *> (stmt));
break;
case GIMPLE_RETURN:
op0 = gimple_return_retval (stmt);
op0 = gimple_return_retval (as_a <greturn *> (stmt));
if (op0 && op0 != error_mark_node)
{
@ -3321,7 +3322,8 @@ expand_gimple_stmt_1 (gimple stmt)
case GIMPLE_ASSIGN:
{
tree lhs = gimple_assign_lhs (stmt);
gassign *assign_stmt = as_a <gassign *> (stmt);
tree lhs = gimple_assign_lhs (assign_stmt);
/* Tree expand used to fiddle with |= and &= of two bitfield
COMPONENT_REFs here. This can't happen with gimple, the LHS
@ -3331,7 +3333,7 @@ expand_gimple_stmt_1 (gimple stmt)
|| get_gimple_rhs_class (gimple_expr_code (stmt))
== GIMPLE_SINGLE_RHS)
{
tree rhs = gimple_assign_rhs1 (stmt);
tree rhs = gimple_assign_rhs1 (assign_stmt);
gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
== GIMPLE_SINGLE_RHS);
if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
@ -3342,12 +3344,13 @@ expand_gimple_stmt_1 (gimple stmt)
;
else
expand_assignment (lhs, rhs,
gimple_assign_nontemporal_move_p (stmt));
gimple_assign_nontemporal_move_p (
assign_stmt));
}
else
{
rtx target, temp;
bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
struct separate_ops ops;
bool promoted = false;
@ -3355,18 +3358,18 @@ expand_gimple_stmt_1 (gimple stmt)
if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
promoted = true;
ops.code = gimple_assign_rhs_code (stmt);
ops.code = gimple_assign_rhs_code (assign_stmt);
ops.type = TREE_TYPE (lhs);
switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
{
case GIMPLE_TERNARY_RHS:
ops.op2 = gimple_assign_rhs3 (stmt);
ops.op2 = gimple_assign_rhs3 (assign_stmt);
/* Fallthru */
case GIMPLE_BINARY_RHS:
ops.op1 = gimple_assign_rhs2 (stmt);
ops.op1 = gimple_assign_rhs2 (assign_stmt);
/* Fallthru */
case GIMPLE_UNARY_RHS:
ops.op0 = gimple_assign_rhs1 (stmt);
ops.op0 = gimple_assign_rhs1 (assign_stmt);
break;
default:
gcc_unreachable ();
@ -3477,7 +3480,7 @@ expand_gimple_stmt (gimple stmt)
tailcall) and the normal result happens via a sqrt instruction. */
static basic_block
expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
{
rtx_insn *last2, *last;
edge e;
@ -4982,7 +4985,7 @@ expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
if (!gsi_end_p (gsi)
&& gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
{
gimple ret_stmt = gsi_stmt (gsi);
greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
gcc_assert (single_succ_p (bb));
gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
@ -5139,7 +5142,7 @@ expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
fixup the CFG accordingly. */
if (gimple_code (stmt) == GIMPLE_COND)
{
new_bb = expand_gimple_cond (bb, stmt);
new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
if (new_bb)
return new_bb;
}
@ -5239,15 +5242,16 @@ expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
}
else
{
if (is_gimple_call (stmt)
&& gimple_call_tail_p (stmt)
gcall *call_stmt = dyn_cast <gcall *> (stmt);
if (call_stmt
&& gimple_call_tail_p (call_stmt)
&& disable_tail_calls)
gimple_call_set_tail (stmt, false);
gimple_call_set_tail (call_stmt, false);
if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
if (call_stmt && gimple_call_tail_p (call_stmt))
{
bool can_fallthru;
new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
if (new_bb)
{
if (can_fallthru)

gcc/cfgloop.c:

@ -587,8 +587,8 @@ find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, vec<edge> la
{
edge e, latch = latches[0];
unsigned i;
gimple phi;
gimple_stmt_iterator psi;
gphi *phi;
gphi_iterator psi;
tree lop;
basic_block bb;
@ -606,7 +606,7 @@ find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, vec<edge> la
a subloop. */
for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
{
phi = gsi_stmt (psi);
phi = psi.phi ();
lop = PHI_ARG_DEF_FROM_EDGE (phi, latch);
/* Ignore the values that are not changed inside the subloop. */

gcc/cfgloopmanip.c:

@ -701,7 +701,7 @@ create_empty_if_region_on_edge (edge entry_edge, tree condition)
basic_block cond_bb, true_bb, false_bb, join_bb;
edge e_true, e_false, exit_edge;
gimple cond_stmt;
gcond *cond_stmt;
tree simple_cond;
gimple_stmt_iterator gsi;
@ -787,7 +787,7 @@ create_empty_loop_on_edge (edge entry_edge,
struct loop *loop;
gimple_stmt_iterator gsi;
gimple_seq stmts;
gimple cond_expr;
gcond *cond_expr;
tree exit_test;
edge exit_e;
int prob;

gcc/cgraph.c:

@ -743,7 +743,7 @@ cgraph_node::get_edge (gimple call_stmt)
edge, then update all components. */
void
cgraph_edge::set_call_stmt (gimple new_stmt, bool update_speculative)
cgraph_edge::set_call_stmt (gcall *new_stmt, bool update_speculative)
{
tree decl;
@ -796,8 +796,8 @@ cgraph_edge::set_call_stmt (gimple new_stmt, bool update_speculative)
cgraph_edge *
symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
gimple call_stmt, gcov_type count, int freq,
bool indir_unknown_callee)
gcall *call_stmt, gcov_type count, int freq,
bool indir_unknown_callee)
{
cgraph_edge *edge;
@ -878,7 +878,7 @@ symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
cgraph_edge *
cgraph_node::create_edge (cgraph_node *callee,
gimple call_stmt, gcov_type count, int freq)
gcall *call_stmt, gcov_type count, int freq)
{
cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
freq, false);
@ -914,7 +914,7 @@ cgraph_allocate_init_indirect_info (void)
PARAM_INDEX. */
cgraph_edge *
cgraph_node::create_indirect_edge (gimple call_stmt, int ecf_flags,
cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
gcov_type count, int freq,
bool compute_indirect_info)
{
@ -1291,7 +1291,7 @@ cgraph_edge::redirect_call_stmt_to_callee (void)
tree decl = gimple_call_fndecl (e->call_stmt);
tree lhs = gimple_call_lhs (e->call_stmt);
gimple new_stmt;
gcall *new_stmt;
gimple_stmt_iterator gsi;
#ifdef ENABLE_CHECKING
cgraph_node *node;
@ -1300,7 +1300,7 @@ cgraph_edge::redirect_call_stmt_to_callee (void)
if (e->speculative)
{
cgraph_edge *e2;
gimple new_stmt;
gcall *new_stmt;
ipa_ref *ref;
e->speculative_call_info (e, e2, ref);
@ -1366,8 +1366,8 @@ cgraph_edge::redirect_call_stmt_to_callee (void)
{
tree dresult = gimple_call_lhs (new_stmt);
tree iresult = gimple_call_lhs (e2->call_stmt);
gimple dbndret = chkp_retbnd_call_by_val (dresult);
gimple ibndret = chkp_retbnd_call_by_val (iresult);
gcall *dbndret = chkp_retbnd_call_by_val (dresult);
gcall *ibndret = chkp_retbnd_call_by_val (iresult);
struct cgraph_edge *iedge
= e2->caller->cgraph_node::get_edge (ibndret);
struct cgraph_edge *dedge;
@ -1534,7 +1534,7 @@ cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
if (callee->decl == new_call
|| callee->former_clone_of == new_call)
{
e->set_call_stmt (new_stmt);
e->set_call_stmt (as_a <gcall *> (new_stmt));
return;
}
callee = callee->clone_of;
@ -1563,13 +1563,14 @@ cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
if (new_call)
{
ne = node->create_edge (cgraph_node::get_create (new_call),
new_stmt, count, frequency);
as_a <gcall *> (new_stmt), count,
frequency);
gcc_assert (ne->inline_failed);
}
}
/* We only updated the call stmt; update pointer in cgraph edge.. */
else if (old_stmt != new_stmt)
node->get_edge (old_stmt)->set_call_stmt (new_stmt);
node->get_edge (old_stmt)->set_call_stmt (as_a <gcall *> (new_stmt));
}
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL

gcc/cgraph.h:

@ -785,7 +785,7 @@ public:
When WHOLE_SPECULATIVE_EDGES is true, all three components of
speculative edge gets updated. Otherwise we update only direct
call. */
void set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
void set_call_stmt_including_clones (gimple old_stmt, gcall *new_stmt,
bool update_speculative = true);
/* Walk the alias chain to return the function cgraph_node is alias of.
@ -966,13 +966,13 @@ public:
/* Create edge from a given function to CALLEE in the cgraph. */
cgraph_edge *create_edge (cgraph_node *callee,
gimple call_stmt, gcov_type count,
gcall *call_stmt, gcov_type count,
int freq);
/* Create an indirect edge with a yet-undetermined callee where the call
statement destination is a formal parameter of the caller with index
PARAM_INDEX. */
cgraph_edge *create_indirect_edge (gimple call_stmt, int ecf_flags,
cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
gcov_type count, int freq,
bool compute_indirect_info = true);
@ -980,7 +980,7 @@ public:
same function body. If clones already have edge for OLD_STMT; only
update the edge same way as cgraph_set_call_stmt_including_clones does. */
void create_edge_including_clones (cgraph_node *callee,
gimple old_stmt, gimple stmt,
gimple old_stmt, gcall *stmt,
gcov_type count,
int freq,
cgraph_inline_failed_t reason);
@ -1462,7 +1462,7 @@ struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
/* Change field call_stmt of edge to NEW_STMT.
If UPDATE_SPECULATIVE and E is any component of speculative
edge, then update all components. */
void set_call_stmt (gimple new_stmt, bool update_speculative = true);
void set_call_stmt (gcall *new_stmt, bool update_speculative = true);
/* Redirect callee of the edge to N. The function does not update underlying
call expression. */
@ -1495,7 +1495,7 @@ struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
/* Create clone of edge in the node N represented
by CALL_EXPR the callgraph. */
cgraph_edge * clone (cgraph_node *n, gimple call_stmt, unsigned stmt_uid,
cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
gcov_type count_scale, int freq_scale, bool update_original);
/* Return true when call of edge can not lead to return from caller
@ -1525,7 +1525,7 @@ struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
cgraph_edge *next_caller;
cgraph_edge *prev_callee;
cgraph_edge *next_callee;
gimple call_stmt;
gcall *call_stmt;
/* Additional information about an indirect call. Not cleared when an edge
becomes direct. */
cgraph_indirect_call_info *indirect_info;
@ -2052,7 +2052,7 @@ private:
parameters of which only CALLEE can be NULL (when creating an indirect call
edge). */
cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
gimple call_stmt, gcov_type count, int freq,
gcall *call_stmt, gcov_type count, int freq,
bool indir_unknown_callee);
/* Put the edge onto the free list. */

gcc/cgraphbuild.c:

@ -353,25 +353,24 @@ pass_build_cgraph_edges::execute (function *fun)
if (is_gimple_debug (stmt))
continue;
if (is_gimple_call (stmt))
if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
int freq = compute_call_stmt_bb_frequency (current_function_decl,
bb);
decl = gimple_call_fndecl (stmt);
decl = gimple_call_fndecl (call_stmt);
if (decl)
node->create_edge (cgraph_node::get_create (decl), stmt, bb->count, freq);
else if (gimple_call_internal_p (stmt))
node->create_edge (cgraph_node::get_create (decl), call_stmt, bb->count, freq);
else if (gimple_call_internal_p (call_stmt))
;
else
node->create_indirect_edge (stmt,
gimple_call_flags (stmt),
node->create_indirect_edge (call_stmt,
gimple_call_flags (call_stmt),
bb->count, freq);
}
node->record_stmt_references (stmt);
if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
&& gimple_omp_parallel_child_fn (stmt))
if (gomp_parallel *omp_par_stmt = dyn_cast <gomp_parallel *> (stmt))
{
tree fn = gimple_omp_parallel_child_fn (stmt);
tree fn = gimple_omp_parallel_child_fn (omp_par_stmt);
node->create_reference (cgraph_node::get_create (fn),
IPA_REF_ADDR, stmt);
}
@ -449,19 +448,19 @@ cgraph_edge::rebuild_edges (void)
gimple stmt = gsi_stmt (gsi);
tree decl;
if (is_gimple_call (stmt))
if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
int freq = compute_call_stmt_bb_frequency (current_function_decl,
bb);
decl = gimple_call_fndecl (stmt);
decl = gimple_call_fndecl (call_stmt);
if (decl)
node->create_edge (cgraph_node::get_create (decl), stmt,
node->create_edge (cgraph_node::get_create (decl), call_stmt,
bb->count, freq);
else if (gimple_call_internal_p (stmt))
else if (gimple_call_internal_p (call_stmt))
;
else
node->create_indirect_edge (stmt,
gimple_call_flags (stmt),
node->create_indirect_edge (call_stmt,
gimple_call_flags (call_stmt),
bb->count, freq);
}
node->record_stmt_references (stmt);

gcc/cgraphclones.c:

@ -117,7 +117,7 @@ along with GCC; see the file COPYING3. If not see
the callgraph. */
cgraph_edge *
cgraph_edge::clone (cgraph_node *n, gimple call_stmt, unsigned stmt_uid,
cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
gcov_type count_scale, int freq_scale, bool update_original)
{
cgraph_edge *new_edge;
@ -702,7 +702,8 @@ cgraph_node::find_replacement (void)
call. */
void
cgraph_node::set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
cgraph_node::set_call_stmt_including_clones (gimple old_stmt,
gcall *new_stmt,
bool update_speculative)
{
cgraph_node *node;
@ -757,7 +758,7 @@ cgraph_node::set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
void
cgraph_node::create_edge_including_clones (cgraph_node *callee,
gimple old_stmt, gimple stmt,
gimple old_stmt, gcall *stmt,
gcov_type count,
int freq,
cgraph_inline_failed_t reason)

gcc/cgraphunit.c:

@ -1340,7 +1340,7 @@ thunk_adjust (gimple_stmt_iterator * bsi,
tree ptr, bool this_adjusting,
HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
gimple stmt;
gassign *stmt;
tree ret;
if (this_adjusting
@ -1516,8 +1516,8 @@ cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
tree resdecl;
tree restmp = NULL;
gimple call;
gimple ret;
gcall *call;
greturn *ret;
if (in_lto_p)
get_untransformed_body ();

gcc/coretypes.h:

@ -86,6 +86,41 @@ typedef struct gimple_statement_base *gimple;
typedef const struct gimple_statement_base *const_gimple;
typedef gimple gimple_seq;
struct gimple_stmt_iterator;
/* Forward decls for leaf gimple subclasses (for individual gimple codes).
Keep this in the same order as the corresponding codes in gimple.def. */
struct gcond;
struct gdebug;
struct ggoto;
struct glabel;
struct gswitch;
struct gassign;
struct gasm;
struct gcall;
struct gtransaction;
struct greturn;
struct gbind;
struct gcatch;
struct geh_filter;
struct geh_mnt;
struct geh_else;
struct gresx;
struct geh_dispatch;
struct gphi;
struct gtry;
struct gomp_atomic_load;
struct gomp_atomic_store;
struct gomp_continue;
struct gomp_critical;
struct gomp_for;
struct gomp_parallel;
struct gomp_task;
struct gomp_sections;
struct gomp_single;
struct gomp_target;
struct gomp_teams;
union section;
typedef union section section;
struct gcc_options;
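
These forward declarations are sufficient wherever only a pointer to a subclass is mentioned (prototypes, data members); the full class definitions remain in gimple.h. A hypothetical sketch of what a header can now write without including gimple.h:

/* A prototype returning a subclass pointer: the forward declaration of
   greturn above is all that is needed.  (The function name is invented.)  */
extern greturn *build_example_return (tree retval);

/* Likewise for a data member, as with the cgraph_edge::call_stmt field
   shown earlier.  */
struct example_call_site
{
  gcall *call_stmt;
  int uid;
};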

gcc/doc/gimple.texi:

@ -311,111 +311,126 @@ kinds, along with their relationships to @code{GSS_} values (layouts) and
| | (no GSS layout)
| |
| + gimple_statement_with_ops
| | layout: GSS_WITH_OPS
| | Used for 5 codes: GIMPLE_COND
| | GIMPLE_DEBUG
| | GIMPLE_GOTO
| | GIMPLE_LABEL
| | GIMPLE_SWITCH
| | | layout: GSS_WITH_OPS
| | |
| | + gcond
| | | code: GIMPLE_COND
| | |
| | + gdebug
| | | code: GIMPLE_DEBUG
| | |
| | + ggoto
| | | code: GIMPLE_GOTO
| | |
| | + glabel
| | | code: GIMPLE_LABEL
| | |
| | + gswitch
| | code: GIMPLE_SWITCH
| |
| + gimple_statement_with_memory_ops_base
| | layout: GSS_WITH_MEM_OPS_BASE
| |
| + gimple_statement_with_memory_ops
| | layout: GSS_WITH_MEM_OPS.
| | used for codes GIMPLE_ASSIGN and GIMPLE_RETURN.
| | | layout: GSS_WITH_MEM_OPS
| | |
| | + gassign
| | | code GIMPLE_ASSIGN
| | |
| | + greturn
| | code GIMPLE_RETURN
| |
| + gimple_statement_call
| + gcall
| | layout: GSS_CALL, code: GIMPLE_CALL
| |
| + gimple_statement_asm
| + gasm
| | layout: GSS_ASM, code: GIMPLE_ASM
| |
| + gimple_statement_transaction
| + gtransaction
| layout: GSS_TRANSACTION, code: GIMPLE_TRANSACTION
|
+ gimple_statement_omp
| | layout: GSS_OMP. Used for code GIMPLE_OMP_SECTION
| |
| + gimple_statement_omp_critical
| + gomp_critical
| | layout: GSS_OMP_CRITICAL, code: GIMPLE_OMP_CRITICAL
| |
| + gimple_statement_omp_for
| + gomp_for
| | layout: GSS_OMP_FOR, code: GIMPLE_OMP_FOR
| |
| + gimple_statement_omp_parallel_layout
| + gomp_parallel_layout
| | | layout: GSS_OMP_PARALLEL_LAYOUT
| | |
| | + gimple_statement_omp_taskreg
| | | |
| | | + gimple_statement_omp_parallel
| | | + gomp_parallel
| | | | code: GIMPLE_OMP_PARALLEL
| | | |
| | | + gimple_statement_omp_task
| | | + gomp_task
| | | code: GIMPLE_OMP_TASK
| | |
| | + gimple_statement_omp_target
| | code: GIMPLE_OMP_TARGET
| |
| + gimple_statement_omp_sections
| + gomp_sections
| | layout: GSS_OMP_SECTIONS, code: GIMPLE_OMP_SECTIONS
| |
| + gimple_statement_omp_single_layout
| | layout: GSS_OMP_SINGLE_LAYOUT
| |
| + gimple_statement_omp_single
| + gomp_single
| | code: GIMPLE_OMP_SINGLE
| |
| + gimple_statement_omp_teams
| + gomp_teams
| code: GIMPLE_OMP_TEAMS
|
+ gimple_statement_bind
+ gbind
| layout: GSS_BIND, code: GIMPLE_BIND
|
+ gimple_statement_catch
+ gcatch
| layout: GSS_CATCH, code: GIMPLE_CATCH
|
+ gimple_statement_eh_filter
+ geh_filter
| layout: GSS_EH_FILTER, code: GIMPLE_EH_FILTER
|
+ gimple_statement_eh_else
+ geh_else
| layout: GSS_EH_ELSE, code: GIMPLE_EH_ELSE
|
+ gimple_statement_eh_mnt
+ geh_mnt
| layout: GSS_EH_MNT, code: GIMPLE_EH_MUST_NOT_THROW
|
+ gimple_statement_phi
+ gphi
| layout: GSS_PHI, code: GIMPLE_PHI
|
+ gimple_statement_eh_ctrl
| | layout: GSS_EH_CTRL
| |
| + gimple_statement_resx
| + gresx
| | code: GIMPLE_RESX
| |
| + gimple_statement_eh_dispatch
| + geh_dispatch
| code: GIMPLE_EH_DISPATCH
|
+ gimple_statement_try
+ gtry
| layout: GSS_TRY, code: GIMPLE_TRY
|
+ gimple_statement_wce
| layout: GSS_WCE, code: GIMPLE_WITH_CLEANUP_EXPR
|
+ gimple_statement_omp_continue
+ gomp_continue
| layout: GSS_OMP_CONTINUE, code: GIMPLE_OMP_CONTINUE
|
+ gimple_statement_omp_atomic_load
+ gomp_atomic_load
| layout: GSS_OMP_ATOMIC_LOAD, code: GIMPLE_OMP_ATOMIC_LOAD
|
+ gimple_statement_omp_atomic_store_layout
| layout: GSS_OMP_ATOMIC_STORE_LAYOUT,
| code: GIMPLE_OMP_ATOMIC_STORE
|
+ gimple_statement_omp_atomic_store
+ gomp_atomic_store
| code: GIMPLE_OMP_ATOMIC_STORE
|
+ gimple_statement_omp_return
+ gomp_return
code: GIMPLE_OMP_RETURN
@end smallexample

gcc/expr.h:

@ -595,7 +595,7 @@ rtx get_personality_function (tree);
/* In stmt.c */
/* Expand a GIMPLE_SWITCH statement. */
extern void expand_case (gimple);
extern void expand_case (gswitch *);
/* Like expand_case but special-case for SJLJ exception dispatching. */
extern void expand_sjlj_dispatch_table (rtx, vec<tree> );

gcc/gdbhooks.py:

@ -455,7 +455,24 @@ def build_pretty_printer():
'tree', TreePrinter)
pp.add_printer_for_types(['cgraph_node *'],
'cgraph_node', CGraphNodePrinter)
pp.add_printer_for_types(['gimple', 'gimple_statement_base *'],
pp.add_printer_for_types(['gimple', 'gimple_statement_base *',
# Keep this in the same order as gimple.def:
'gimple_cond', 'const_gimple_cond',
'gimple_statement_cond *',
'gimple_debug', 'const_gimple_debug',
'gimple_statement_debug *',
'gimple_label', 'const_gimple_label',
'gimple_statement_label *',
'gimple_switch', 'const_gimple_switch',
'gimple_statement_switch *',
'gimple_assign', 'const_gimple_assign',
'gimple_statement_assign *',
'gimple_bind', 'const_gimple_bind',
'gimple_statement_bind *',
'gimple_phi', 'const_gimple_phi',
'gimple_statement_phi *'],
'gimple',
GimplePrinter)
pp.add_printer_for_types(['basic_block', 'basic_block_def *'],

gcc/gimple-builder.c:

@ -60,7 +60,7 @@ get_expr_type (enum tree_code code, tree op)
the expression code for the RHS. OP1 is the first operand and VAL
is an integer value to be used as the second operand. */
gimple
gassign *
build_assign (enum tree_code code, tree op1, int val, tree lhs)
{
tree op2 = build_int_cst (TREE_TYPE (op1), val);
@ -69,7 +69,7 @@ build_assign (enum tree_code code, tree op1, int val, tree lhs)
return gimple_build_assign_with_ops (code, lhs, op1, op2);
}
gimple
gassign *
build_assign (enum tree_code code, gimple g, int val, tree lhs )
{
return build_assign (code, gimple_assign_lhs (g), val, lhs);
@ -84,7 +84,7 @@ build_assign (enum tree_code code, gimple g, int val, tree lhs )
in normal form depending on the type of builder invoking this
function. */
gimple
gassign *
build_assign (enum tree_code code, tree op1, tree op2, tree lhs)
{
if (lhs == NULL_TREE)
@ -92,19 +92,19 @@ build_assign (enum tree_code code, tree op1, tree op2, tree lhs)
return gimple_build_assign_with_ops (code, lhs, op1, op2);
}
gimple
gassign *
build_assign (enum tree_code code, gimple op1, tree op2, tree lhs)
{
return build_assign (code, gimple_assign_lhs (op1), op2, lhs);
}
gimple
gassign *
build_assign (enum tree_code code, tree op1, gimple op2, tree lhs)
{
return build_assign (code, op1, gimple_assign_lhs (op2), lhs);
}
gimple
gassign *
build_assign (enum tree_code code, gimple op1, gimple op2, tree lhs)
{
return build_assign (code, gimple_assign_lhs (op1), gimple_assign_lhs (op2),
@ -115,7 +115,7 @@ build_assign (enum tree_code code, gimple op1, gimple op2, tree lhs)
/* Create and return a type cast assignment. This creates a NOP_EXPR
that converts OP to TO_TYPE. */
gimple
gassign *
build_type_cast (tree to_type, tree op, tree lhs)
{
if (lhs == NULL_TREE)
@ -123,7 +123,7 @@ build_type_cast (tree to_type, tree op, tree lhs)
return gimple_build_assign_with_ops (NOP_EXPR, lhs, op);
}
gimple
gassign *
build_type_cast (tree to_type, gimple op, tree lhs)
{
return build_type_cast (to_type, gimple_assign_lhs (op), lhs);

gcc/gimple-builder.h:

@ -21,13 +21,13 @@ along with GCC; see the file COPYING3. If not see
#ifndef GCC_GIMPLE_BUILDER_H
#define GCC_GIMPLE_BUILDER_H
gimple build_assign (enum tree_code, tree, int, tree lhs = NULL_TREE);
gimple build_assign (enum tree_code, gimple, int, tree lhs = NULL_TREE);
gimple build_assign (enum tree_code, tree, tree, tree lhs = NULL_TREE);
gimple build_assign (enum tree_code, gimple, tree, tree lhs = NULL_TREE);
gimple build_assign (enum tree_code, tree, gimple, tree lhs = NULL_TREE);
gimple build_assign (enum tree_code, gimple, gimple, tree lhs = NULL_TREE);
gimple build_type_cast (tree, tree, tree lhs = NULL_TREE);
gimple build_type_cast (tree, gimple, tree lhs = NULL_TREE);
gassign *build_assign (enum tree_code, tree, int, tree lhs = NULL_TREE);
gassign *build_assign (enum tree_code, gimple, int, tree lhs = NULL_TREE);
gassign *build_assign (enum tree_code, tree, tree, tree lhs = NULL_TREE);
gassign *build_assign (enum tree_code, gimple, tree, tree lhs = NULL_TREE);
gassign *build_assign (enum tree_code, tree, gimple, tree lhs = NULL_TREE);
gassign *build_assign (enum tree_code, gimple, gimple, tree lhs = NULL_TREE);
gassign *build_type_cast (tree, tree, tree lhs = NULL_TREE);
gassign *build_type_cast (tree, gimple, tree lhs = NULL_TREE);
#endif /* GCC_GIMPLE_BUILDER_H */
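
With the builders returning gassign *, their results feed the typed gimple_assign_* accessors directly and still convert implicitly to plain gimple where needed. A sketch under those assumptions (the wrapper name is invented):

static gassign *
build_square_then_cast (tree op, tree to_type)
{
  /* No explicit LHS is passed, so the builder creates one of the
     appropriate type (see gimple-builder.c above).  */
  gassign *mul = build_assign (MULT_EXPR, op, op);
  /* MUL converts implicitly from gassign * to gimple, so it can be
     used as the operand of the cast overload.  */
  return build_type_cast (to_type, mul);
}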

gcc/gimple-fold.c:

@ -545,7 +545,7 @@ fold_gimple_assign (gimple_stmt_iterator *si)
assumed that the operands have been previously folded. */
static bool
fold_gimple_cond (gimple stmt)
fold_gimple_cond (gcond *stmt)
{
tree result = fold_binary_loc (gimple_location (stmt),
gimple_cond_code (stmt),
@ -2024,7 +2024,7 @@ static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
enum built_in_function fcode)
{
gimple stmt = gsi_stmt (*gsi);
gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
tree dest, size, len, fn, fmt, flag;
const char *fmt_str;
@ -2104,7 +2104,7 @@ static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
enum built_in_function fcode)
{
gimple stmt = gsi_stmt (*gsi);
gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
tree dest, size, len, fn, fmt, flag;
const char *fmt_str;
unsigned nargs = gimple_call_num_args (stmt);
@ -2327,7 +2327,7 @@ gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
gimple stmt = gsi_stmt (*gsi);
gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
tree dest = gimple_call_arg (stmt, 0);
tree destsize = gimple_call_arg (stmt, 1);
tree fmt = gimple_call_arg (stmt, 2);
@ -2477,7 +2477,7 @@ gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
gimple stmt = gsi_stmt (*gsi);
gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
tree callee = gimple_call_fndecl (stmt);
/* Give up for always_inline inline builtins until they are
@ -2621,7 +2621,7 @@ arith_overflowed_p (enum tree_code code, const_tree type,
static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
gimple stmt = gsi_stmt (*gsi);
gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
tree callee;
bool changed = false;
unsigned i;
@ -2931,7 +2931,7 @@ replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
return false;
if (gimple_code (stmt) == GIMPLE_COND)
if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
{
gcc_assert (rcode.is_tree_code ());
if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
@ -2941,16 +2941,16 @@ replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
|| !operation_could_trap_p (rcode,
FLOAT_TYPE_P (TREE_TYPE (ops[0])),
false, NULL_TREE)))
gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
else if (rcode == SSA_NAME)
gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
build_zero_cst (TREE_TYPE (ops[0])));
else if (rcode == INTEGER_CST)
{
if (integer_zerop (ops[0]))
gimple_cond_make_false (stmt);
gimple_cond_make_false (cond_stmt);
else
gimple_cond_make_true (stmt);
gimple_cond_make_true (cond_stmt);
}
else if (!inplace)
{
@ -2958,7 +2958,7 @@ replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
ops, seq);
if (!res)
return false;
gimple_cond_set_condition (stmt, NE_EXPR, res,
gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
build_zero_cst (TREE_TYPE (res)));
}
else
@ -3149,17 +3149,18 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
}
case GIMPLE_ASM:
{
for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
gasm *asm_stmt = as_a <gasm *> (stmt);
for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
{
tree link = gimple_asm_output_op (stmt, i);
tree link = gimple_asm_output_op (asm_stmt, i);
tree op = TREE_VALUE (link);
if (REFERENCE_CLASS_P (op)
&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
changed = true;
}
for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
{
tree link = gimple_asm_input_op (stmt, i);
tree link = gimple_asm_input_op (asm_stmt, i);
tree op = TREE_VALUE (link);
if ((REFERENCE_CLASS_P (op)
|| TREE_CODE (op) == ADDR_EXPR)
@ -3238,7 +3239,7 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
}
case GIMPLE_COND:
changed |= fold_gimple_cond (stmt);
changed |= fold_gimple_cond (as_a <gcond *> (stmt));
break;
case GIMPLE_CALL:
@ -3248,17 +3249,18 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
case GIMPLE_ASM:
/* Fold *& in asm operands. */
{
gasm *asm_stmt = as_a <gasm *> (stmt);
size_t noutputs;
const char **oconstraints;
const char *constraint;
bool allows_mem, allows_reg;
noutputs = gimple_asm_noutputs (stmt);
noutputs = gimple_asm_noutputs (asm_stmt);
oconstraints = XALLOCAVEC (const char *, noutputs);
for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
{
tree link = gimple_asm_output_op (stmt, i);
tree link = gimple_asm_output_op (asm_stmt, i);
tree op = TREE_VALUE (link);
oconstraints[i]
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
@ -3269,9 +3271,9 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
changed = true;
}
}
for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
{
tree link = gimple_asm_input_op (stmt, i);
tree link = gimple_asm_input_op (asm_stmt, i);
tree op = TREE_VALUE (link);
constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
@ -4683,6 +4685,7 @@ gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
case GIMPLE_CALL:
{
tree fn;
gcall *call_stmt = as_a <gcall *> (stmt);
if (gimple_call_internal_p (stmt))
{
@ -4747,14 +4750,15 @@ gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
for (i = 0; i < gimple_call_num_args (stmt); ++i)
args[i] = (*valueize) (gimple_call_arg (stmt, i));
call = build_call_array_loc (loc,
gimple_call_return_type (stmt),
gimple_call_return_type (call_stmt),
fn, gimple_call_num_args (stmt), args);
retval = fold_call_expr (EXPR_LOCATION (call), call, false);
if (retval)
{
/* fold_call_expr wraps the result inside a NOP_EXPR. */
STRIP_NOPS (retval);
retval = fold_convert (gimple_call_return_type (stmt), retval);
retval = fold_convert (gimple_call_return_type (call_stmt),
retval);
}
return retval;
}

gcc/gimple-iterator.c:

@ -639,6 +639,19 @@ gsi_for_stmt (gimple stmt)
return i;
}
/* Finds iterator for PHI. */
gphi_iterator
gsi_for_phi (gphi *phi)
{
gphi_iterator i;
basic_block bb = gimple_bb (phi);
i = gsi_start_phis (bb);
i.ptr = phi;
return i;
}
/* Move the statement at FROM so it comes right after the statement at TO. */
@ -899,9 +912,17 @@ gsi_commit_one_edge_insert (edge e, basic_block *new_bb)
/* Returns iterator at the start of the list of phi nodes of BB. */
gimple_stmt_iterator
gphi_iterator
gsi_start_phis (basic_block bb)
{
gimple_seq *pseq = phi_nodes_ptr (bb);
return gsi_start_1 (pseq);
/* Adapted from gsi_start_1. */
gphi_iterator i;
i.ptr = gimple_seq_first (*pseq);
i.seq = pseq;
i.bb = i.ptr ? gimple_bb (i.ptr) : NULL;
return i;
}

gcc/gimple-iterator.h:

@ -34,6 +34,15 @@ struct gimple_stmt_iterator
gimple_seq *seq;
basic_block bb;
};
/* Iterator over GIMPLE_PHI statements. */
struct gphi_iterator : public gimple_stmt_iterator
{
gphi *phi () const
{
return as_a <gphi *> (ptr);
}
};
enum gsi_iterator_update
{
@ -70,6 +79,7 @@ extern void gsi_insert_after (gimple_stmt_iterator *, gimple,
enum gsi_iterator_update);
extern bool gsi_remove (gimple_stmt_iterator *, bool);
extern gimple_stmt_iterator gsi_for_stmt (gimple);
extern gphi_iterator gsi_for_phi (gphi *);
extern void gsi_move_after (gimple_stmt_iterator *, gimple_stmt_iterator *);
extern void gsi_move_before (gimple_stmt_iterator *, gimple_stmt_iterator *);
extern void gsi_move_to_bb_end (gimple_stmt_iterator *, basic_block);
@ -79,7 +89,7 @@ extern basic_block gsi_insert_on_edge_immediate (edge, gimple);
extern basic_block gsi_insert_seq_on_edge_immediate (edge, gimple_seq);
extern void gsi_commit_edge_inserts (void);
extern void gsi_commit_one_edge_insert (edge, basic_block *);
extern gimple_stmt_iterator gsi_start_phis (basic_block);
extern gphi_iterator gsi_start_phis (basic_block);
/* Return a new iterator pointing to GIMPLE_SEQ's first statement. */
@ -284,14 +294,14 @@ gsi_last_nondebug_bb (basic_block bb)
/* Iterates I statement iterator to the next non-virtual statement. */
static inline void
gsi_next_nonvirtual_phi (gimple_stmt_iterator *i)
gsi_next_nonvirtual_phi (gphi_iterator *i)
{
gimple phi;
gphi *phi;
if (gsi_end_p (*i))
return;
phi = gsi_stmt (*i);
phi = i->phi ();
gcc_assert (phi != NULL);
while (virtual_operand_p (gimple_phi_result (phi)))
@ -301,7 +311,7 @@ gsi_next_nonvirtual_phi (gimple_stmt_iterator *i)
if (gsi_end_p (*i))
return;
phi = gsi_stmt (*i);
phi = i->phi ();
}
}
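
A sketch (the counting helper is invented) of the iteration idiom that gphi_iterator enables: gsi_start_phis now returns a gphi_iterator, and its phi () accessor yields a gphi * without any manual cast, while the iterator still works with gsi_end_p and gsi_next.

static unsigned
count_phi_arguments (basic_block bb)
{
  unsigned n = 0;
  for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi);
       gsi_next (&gpi))
    n += gimple_phi_num_args (gpi.phi ());
  return n;
}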

gcc/gimple-low.c:

@ -67,7 +67,7 @@ along with GCC; see the file COPYING3. If not see
struct return_statements_t
{
tree label;
gimple stmt;
greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
@ -305,8 +305,11 @@ lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
return;
case GIMPLE_EH_ELSE:
lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
{
geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
}
break;
case GIMPLE_NOP:
@ -378,7 +381,9 @@ lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
return;
case GIMPLE_TRANSACTION:
lower_sequence (gimple_transaction_body_ptr (stmt), data);
lower_sequence (gimple_transaction_body_ptr (
as_a <gtransaction *> (stmt)),
data);
break;
default:
@ -395,7 +400,7 @@ static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
tree old_block = data->block;
gimple stmt = gsi_stmt (*gsi);
gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
tree new_block = gimple_bind_block (stmt);
if (new_block)
@ -468,7 +473,9 @@ lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
for (; !gsi_end_p (i); gsi_next (&i))
{
data->cannot_fallthru = false;
lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
lower_sequence (gimple_catch_handler_ptr (
as_a <gcatch *> (gsi_stmt (i))),
data);
if (!data->cannot_fallthru)
cannot_fallthru = false;
}
@ -509,7 +516,7 @@ lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
This is a subroutine of gimple_stmt_may_fallthru. */
static bool
gimple_try_catch_may_fallthru (gimple stmt)
gimple_try_catch_may_fallthru (gtry *stmt)
{
gimple_stmt_iterator i;
@ -530,7 +537,8 @@ gimple_try_catch_may_fallthru (gimple stmt)
through iff any of the catch bodies falls through. */
for (; !gsi_end_p (i); gsi_next (&i))
{
if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
if (gimple_seq_may_fallthru (gimple_catch_handler (
as_a <gcatch *> (gsi_stmt (i)))))
return true;
}
return false;
@ -589,11 +597,12 @@ gimple_stmt_may_fallthru (gimple stmt)
return false;
case GIMPLE_BIND:
return gimple_seq_may_fallthru (gimple_bind_body (stmt));
return gimple_seq_may_fallthru (
gimple_bind_body (as_a <gbind *> (stmt)));
case GIMPLE_TRY:
if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
return gimple_try_catch_may_fallthru (stmt);
return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));
/* It must be a GIMPLE_TRY_FINALLY. */
@ -608,8 +617,12 @@ gimple_stmt_may_fallthru (gimple stmt)
&& gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));
case GIMPLE_EH_ELSE:
return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
|| gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));
{
geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
eh_else_stmt)));
}
case GIMPLE_CALL:
/* Functions that do not return do not fall through. */
@ -635,7 +648,7 @@ gimple_seq_may_fallthru (gimple_seq seq)
static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
gimple stmt = gsi_stmt (*gsi);
greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
gimple t;
int i;
return_statements_t tmp_rs;

gcc/gimple-pretty-print.c:

@ -271,7 +271,7 @@ dump_gimple_fmt (pretty_printer *buffer, int spc, int flags,
assignment GS. BUFFER, SPC and FLAGS are as in pp_gimple_stmt_1. */
static void
dump_unary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_unary_rhs (pretty_printer *buffer, gassign *gs, int spc, int flags)
{
enum tree_code rhs_code = gimple_assign_rhs_code (gs);
tree lhs = gimple_assign_lhs (gs);
@ -355,7 +355,7 @@ dump_unary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
assignment GS. BUFFER, SPC and FLAGS are as in pp_gimple_stmt_1. */
static void
dump_binary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_binary_rhs (pretty_printer *buffer, gassign *gs, int spc, int flags)
{
const char *p;
enum tree_code code = gimple_assign_rhs_code (gs);
@ -411,7 +411,7 @@ dump_binary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
assignment GS. BUFFER, SPC and FLAGS are as in pp_gimple_stmt_1. */
static void
dump_ternary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_ternary_rhs (pretty_printer *buffer, gassign *gs, int spc, int flags)
{
const char *p;
enum tree_code code = gimple_assign_rhs_code (gs);
@ -506,7 +506,7 @@ dump_ternary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
pp_gimple_stmt_1. */
static void
dump_gimple_assign (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_assign (pretty_printer *buffer, gassign *gs, int spc, int flags)
{
if (flags & TDF_RAW)
{
@ -565,7 +565,7 @@ dump_gimple_assign (pretty_printer *buffer, gimple gs, int spc, int flags)
pp_gimple_stmt_1. */
static void
dump_gimple_return (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_return (pretty_printer *buffer, greturn *gs, int spc, int flags)
{
tree t, t2;
@ -595,7 +595,7 @@ dump_gimple_return (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_call. */
static void
dump_gimple_call_args (pretty_printer *buffer, gimple gs, int flags)
dump_gimple_call_args (pretty_printer *buffer, gcall *gs, int flags)
{
size_t i;
@ -668,7 +668,7 @@ pp_points_to_solution (pretty_printer *buffer, struct pt_solution *pt)
pp_gimple_stmt_1. */
static void
dump_gimple_call (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_call (pretty_printer *buffer, gcall *gs, int spc, int flags)
{
tree lhs = gimple_call_lhs (gs);
tree fn = gimple_call_fn (gs);
@ -801,7 +801,8 @@ dump_gimple_call (pretty_printer *buffer, gimple gs, int spc, int flags)
pp_gimple_stmt_1. */
static void
dump_gimple_switch (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_switch (pretty_printer *buffer, gswitch *gs, int spc,
int flags)
{
unsigned int i;
@ -834,7 +835,7 @@ dump_gimple_switch (pretty_printer *buffer, gimple gs, int spc, int flags)
pp_gimple_stmt_1. */
static void
dump_gimple_cond (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_cond (pretty_printer *buffer, gcond *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%s, %T, %T, %T, %T>", gs,
@ -878,7 +879,7 @@ dump_gimple_cond (pretty_printer *buffer, gimple gs, int spc, int flags)
TDF_* in dumpfils.h). */
static void
dump_gimple_label (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_label (pretty_printer *buffer, glabel *gs, int spc, int flags)
{
tree label = gimple_label_label (gs);
if (flags & TDF_RAW)
@ -899,7 +900,7 @@ dump_gimple_label (pretty_printer *buffer, gimple gs, int spc, int flags)
TDF_* in dumpfile.h). */
static void
dump_gimple_goto (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_goto (pretty_printer *buffer, ggoto *gs, int spc, int flags)
{
tree label = gimple_goto_dest (gs);
if (flags & TDF_RAW)
@ -914,7 +915,7 @@ dump_gimple_goto (pretty_printer *buffer, gimple gs, int spc, int flags)
TDF_* in dumpfile.h). */
static void
dump_gimple_bind (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_bind (pretty_printer *buffer, gbind *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <", gs);
@ -947,7 +948,7 @@ dump_gimple_bind (pretty_printer *buffer, gimple gs, int spc, int flags)
dumpfile.h). */
static void
dump_gimple_try (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_try (pretty_printer *buffer, gtry *gs, int spc, int flags)
{
if (flags & TDF_RAW)
{
@ -1003,7 +1004,7 @@ dump_gimple_try (pretty_printer *buffer, gimple gs, int spc, int flags)
dumpfile.h). */
static void
dump_gimple_catch (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_catch (pretty_printer *buffer, gcatch *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%T, %+CATCH <%S>%->", gs,
@ -1019,7 +1020,8 @@ dump_gimple_catch (pretty_printer *buffer, gimple gs, int spc, int flags)
dumpfile.h). */
static void
dump_gimple_eh_filter (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_eh_filter (pretty_printer *buffer, geh_filter *gs, int spc,
int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%T, %+FAILURE <%S>%->", gs,
@ -1035,8 +1037,8 @@ dump_gimple_eh_filter (pretty_printer *buffer, gimple gs, int spc, int flags)
/* Dump a GIMPLE_EH_MUST_NOT_THROW tuple. */
static void
dump_gimple_eh_must_not_throw (pretty_printer *buffer, gimple gs,
int spc, int flags)
dump_gimple_eh_must_not_throw (pretty_printer *buffer,
geh_mnt *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%T>", gs,
@ -1052,7 +1054,8 @@ dump_gimple_eh_must_not_throw (pretty_printer *buffer, gimple gs,
dumpfile.h). */
static void
dump_gimple_eh_else (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_eh_else (pretty_printer *buffer, geh_else *gs, int spc,
int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags,
@ -1070,7 +1073,7 @@ dump_gimple_eh_else (pretty_printer *buffer, gimple gs, int spc, int flags)
dumpfile.h). */
static void
dump_gimple_resx (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_resx (pretty_printer *buffer, gresx *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%d>", gs,
@ -1082,7 +1085,7 @@ dump_gimple_resx (pretty_printer *buffer, gimple gs, int spc, int flags)
/* Dump a GIMPLE_EH_DISPATCH tuple on the pretty_printer BUFFER. */
static void
dump_gimple_eh_dispatch (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_eh_dispatch (pretty_printer *buffer, geh_dispatch *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%d>", gs,
@ -1097,7 +1100,7 @@ dump_gimple_eh_dispatch (pretty_printer *buffer, gimple gs, int spc, int flags)
in dumpfile.h). */
static void
dump_gimple_debug (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_debug (pretty_printer *buffer, gdebug *gs, int spc, int flags)
{
switch (gs->subcode)
{
@ -1130,7 +1133,7 @@ dump_gimple_debug (pretty_printer *buffer, gimple gs, int spc, int flags)
/* Dump a GIMPLE_OMP_FOR tuple on the pretty_printer BUFFER. */
static void
dump_gimple_omp_for (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_omp_for (pretty_printer *buffer, gomp_for *gs, int spc, int flags)
{
size_t i;
@ -1266,7 +1269,8 @@ dump_gimple_omp_for (pretty_printer *buffer, gimple gs, int spc, int flags)
/* Dump a GIMPLE_OMP_CONTINUE tuple on the pretty_printer BUFFER. */
static void
dump_gimple_omp_continue (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_omp_continue (pretty_printer *buffer, gomp_continue *gs,
int spc, int flags)
{
if (flags & TDF_RAW)
{
@ -1290,7 +1294,8 @@ dump_gimple_omp_continue (pretty_printer *buffer, gimple gs, int spc, int flags)
/* Dump a GIMPLE_OMP_SINGLE tuple on the pretty_printer BUFFER. */
static void
dump_gimple_omp_single (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_omp_single (pretty_printer *buffer, gomp_single *gs,
int spc, int flags)
{
if (flags & TDF_RAW)
{
@ -1318,7 +1323,8 @@ dump_gimple_omp_single (pretty_printer *buffer, gimple gs, int spc, int flags)
/* Dump a GIMPLE_OMP_TARGET tuple on the pretty_printer BUFFER. */
static void
dump_gimple_omp_target (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_omp_target (pretty_printer *buffer, gomp_target *gs,
int spc, int flags)
{
const char *kind;
switch (gimple_omp_target_kind (gs))
@ -1369,7 +1375,8 @@ dump_gimple_omp_target (pretty_printer *buffer, gimple gs, int spc, int flags)
/* Dump a GIMPLE_OMP_TEAMS tuple on the pretty_printer BUFFER. */
static void
dump_gimple_omp_teams (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_omp_teams (pretty_printer *buffer, gomp_teams *gs, int spc,
int flags)
{
if (flags & TDF_RAW)
{
@ -1397,8 +1404,8 @@ dump_gimple_omp_teams (pretty_printer *buffer, gimple gs, int spc, int flags)
/* Dump a GIMPLE_OMP_SECTIONS tuple on the pretty_printer BUFFER. */
static void
dump_gimple_omp_sections (pretty_printer *buffer, gimple gs, int spc,
int flags)
dump_gimple_omp_sections (pretty_printer *buffer, gomp_sections *gs,
int spc, int flags)
{
if (flags & TDF_RAW)
{
@ -1473,8 +1480,8 @@ dump_gimple_omp_block (pretty_printer *buffer, gimple gs, int spc, int flags)
/* Dump a GIMPLE_OMP_CRITICAL tuple on the pretty_printer BUFFER. */
static void
dump_gimple_omp_critical (pretty_printer *buffer, gimple gs, int spc,
int flags)
dump_gimple_omp_critical (pretty_printer *buffer, gomp_critical *gs,
int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%+BODY <%S> >", gs,
@ -1534,7 +1541,8 @@ dump_gimple_omp_return (pretty_printer *buffer, gimple gs, int spc, int flags)
/* Dump a GIMPLE_TRANSACTION tuple on the pretty_printer BUFFER. */
static void
dump_gimple_transaction (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_transaction (pretty_printer *buffer, gtransaction *gs,
int spc, int flags)
{
unsigned subcode = gimple_transaction_subcode (gs);
@ -1621,7 +1629,7 @@ dump_gimple_transaction (pretty_printer *buffer, gimple gs, int spc, int flags)
dumpfile.h). */
static void
dump_gimple_asm (pretty_printer *buffer, gimple gs, int spc, int flags)
dump_gimple_asm (pretty_printer *buffer, gasm *gs, int spc, int flags)
{
unsigned int i, n, f, fields;
@ -1827,7 +1835,7 @@ dump_ssaname_info (pretty_printer *buffer, tree node, int spc)
pretty printer. If COMMENT is true, print this after #. */
static void
dump_gimple_phi (pretty_printer *buffer, gimple phi, int spc, bool comment,
dump_gimple_phi (pretty_printer *buffer, gphi *phi, int spc, bool comment,
int flags)
{
size_t i;
@ -1868,8 +1876,8 @@ dump_gimple_phi (pretty_printer *buffer, gimple phi, int spc, bool comment,
dumpfile.h). */
static void
dump_gimple_omp_parallel (pretty_printer *buffer, gimple gs, int spc,
int flags)
dump_gimple_omp_parallel (pretty_printer *buffer, gomp_parallel *gs,
int spc, int flags)
{
if (flags & TDF_RAW)
{
@ -1922,7 +1930,7 @@ dump_gimple_omp_parallel (pretty_printer *buffer, gimple gs, int spc,
dumpfile.h). */
static void
dump_gimple_omp_task (pretty_printer *buffer, gimple gs, int spc,
dump_gimple_omp_task (pretty_printer *buffer, gomp_task *gs, int spc,
int flags)
{
if (flags & TDF_RAW)
@ -1979,8 +1987,8 @@ dump_gimple_omp_task (pretty_printer *buffer, gimple gs, int spc,
in dumpfile.h). */
static void
dump_gimple_omp_atomic_load (pretty_printer *buffer, gimple gs, int spc,
int flags)
dump_gimple_omp_atomic_load (pretty_printer *buffer, gomp_atomic_load *gs,
int spc, int flags)
{
if (flags & TDF_RAW)
{
@ -2012,8 +2020,8 @@ dump_gimple_omp_atomic_load (pretty_printer *buffer, gimple gs, int spc,
in dumpfile.h). */
static void
dump_gimple_omp_atomic_store (pretty_printer *buffer, gimple gs, int spc,
int flags)
dump_gimple_omp_atomic_store (pretty_printer *buffer,
gomp_atomic_store *gs, int spc, int flags)
{
if (flags & TDF_RAW)
{
@ -2100,31 +2108,31 @@ pp_gimple_stmt_1 (pretty_printer *buffer, gimple gs, int spc, int flags)
switch (gimple_code (gs))
{
case GIMPLE_ASM:
dump_gimple_asm (buffer, gs, spc, flags);
dump_gimple_asm (buffer, as_a <gasm *> (gs), spc, flags);
break;
case GIMPLE_ASSIGN:
dump_gimple_assign (buffer, gs, spc, flags);
dump_gimple_assign (buffer, as_a <gassign *> (gs), spc, flags);
break;
case GIMPLE_BIND:
dump_gimple_bind (buffer, gs, spc, flags);
dump_gimple_bind (buffer, as_a <gbind *> (gs), spc, flags);
break;
case GIMPLE_CALL:
dump_gimple_call (buffer, gs, spc, flags);
dump_gimple_call (buffer, as_a <gcall *> (gs), spc, flags);
break;
case GIMPLE_COND:
dump_gimple_cond (buffer, gs, spc, flags);
dump_gimple_cond (buffer, as_a <gcond *> (gs), spc, flags);
break;
case GIMPLE_LABEL:
dump_gimple_label (buffer, gs, spc, flags);
dump_gimple_label (buffer, as_a <glabel *> (gs), spc, flags);
break;
case GIMPLE_GOTO:
dump_gimple_goto (buffer, gs, spc, flags);
dump_gimple_goto (buffer, as_a <ggoto *> (gs), spc, flags);
break;
case GIMPLE_NOP:
@ -2132,56 +2140,63 @@ pp_gimple_stmt_1 (pretty_printer *buffer, gimple gs, int spc, int flags)
break;
case GIMPLE_RETURN:
dump_gimple_return (buffer, gs, spc, flags);
dump_gimple_return (buffer, as_a <greturn *> (gs), spc, flags);
break;
case GIMPLE_SWITCH:
dump_gimple_switch (buffer, gs, spc, flags);
dump_gimple_switch (buffer, as_a <gswitch *> (gs), spc, flags);
break;
case GIMPLE_TRY:
dump_gimple_try (buffer, gs, spc, flags);
dump_gimple_try (buffer, as_a <gtry *> (gs), spc, flags);
break;
case GIMPLE_PHI:
dump_gimple_phi (buffer, gs, spc, false, flags);
dump_gimple_phi (buffer, as_a <gphi *> (gs), spc, false, flags);
break;
case GIMPLE_OMP_PARALLEL:
dump_gimple_omp_parallel (buffer, gs, spc, flags);
dump_gimple_omp_parallel (buffer, as_a <gomp_parallel *> (gs), spc,
flags);
break;
case GIMPLE_OMP_TASK:
dump_gimple_omp_task (buffer, gs, spc, flags);
dump_gimple_omp_task (buffer, as_a <gomp_task *> (gs), spc, flags);
break;
case GIMPLE_OMP_ATOMIC_LOAD:
dump_gimple_omp_atomic_load (buffer, gs, spc, flags);
dump_gimple_omp_atomic_load (buffer, as_a <gomp_atomic_load *> (gs),
spc, flags);
break;
case GIMPLE_OMP_ATOMIC_STORE:
dump_gimple_omp_atomic_store (buffer, gs, spc, flags);
dump_gimple_omp_atomic_store (buffer,
as_a <gomp_atomic_store *> (gs),
spc, flags);
break;
case GIMPLE_OMP_FOR:
dump_gimple_omp_for (buffer, gs, spc, flags);
dump_gimple_omp_for (buffer, as_a <gomp_for *> (gs), spc, flags);
break;
case GIMPLE_OMP_CONTINUE:
dump_gimple_omp_continue (buffer, gs, spc, flags);
dump_gimple_omp_continue (buffer, as_a <gomp_continue *> (gs), spc,
flags);
break;
case GIMPLE_OMP_SINGLE:
dump_gimple_omp_single (buffer, gs, spc, flags);
dump_gimple_omp_single (buffer, as_a <gomp_single *> (gs), spc,
flags);
break;
case GIMPLE_OMP_TARGET:
dump_gimple_omp_target (buffer, gs, spc, flags);
dump_gimple_omp_target (buffer, as_a <gomp_target *> (gs), spc,
flags);
break;
case GIMPLE_OMP_TEAMS:
dump_gimple_omp_teams (buffer, gs, spc, flags);
dump_gimple_omp_teams (buffer, as_a <gomp_teams *> (gs), spc,
flags);
break;
case GIMPLE_OMP_RETURN:
@ -2189,7 +2204,8 @@ pp_gimple_stmt_1 (pretty_printer *buffer, gimple gs, int spc, int flags)
break;
case GIMPLE_OMP_SECTIONS:
dump_gimple_omp_sections (buffer, gs, spc, flags);
dump_gimple_omp_sections (buffer, as_a <gomp_sections *> (gs),
spc, flags);
break;
case GIMPLE_OMP_SECTIONS_SWITCH:
@ -2204,35 +2220,39 @@ pp_gimple_stmt_1 (pretty_printer *buffer, gimple gs, int spc, int flags)
break;
case GIMPLE_OMP_CRITICAL:
dump_gimple_omp_critical (buffer, gs, spc, flags);
dump_gimple_omp_critical (buffer, as_a <gomp_critical *> (gs), spc,
flags);
break;
case GIMPLE_CATCH:
dump_gimple_catch (buffer, gs, spc, flags);
dump_gimple_catch (buffer, as_a <gcatch *> (gs), spc, flags);
break;
case GIMPLE_EH_FILTER:
dump_gimple_eh_filter (buffer, gs, spc, flags);
dump_gimple_eh_filter (buffer, as_a <geh_filter *> (gs), spc, flags);
break;
case GIMPLE_EH_MUST_NOT_THROW:
dump_gimple_eh_must_not_throw (buffer, gs, spc, flags);
dump_gimple_eh_must_not_throw (buffer,
as_a <geh_mnt *> (gs),
spc, flags);
break;
case GIMPLE_EH_ELSE:
dump_gimple_eh_else (buffer, gs, spc, flags);
dump_gimple_eh_else (buffer, as_a <geh_else *> (gs), spc, flags);
break;
case GIMPLE_RESX:
dump_gimple_resx (buffer, gs, spc, flags);
dump_gimple_resx (buffer, as_a <gresx *> (gs), spc, flags);
break;
case GIMPLE_EH_DISPATCH:
dump_gimple_eh_dispatch (buffer, gs, spc, flags);
dump_gimple_eh_dispatch (buffer, as_a <geh_dispatch *> (gs), spc,
flags);
break;
case GIMPLE_DEBUG:
dump_gimple_debug (buffer, gs, spc, flags);
dump_gimple_debug (buffer, as_a <gdebug *> (gs), spc, flags);
break;
case GIMPLE_PREDICT:
@ -2246,7 +2266,8 @@ pp_gimple_stmt_1 (pretty_printer *buffer, gimple gs, int spc, int flags)
break;
case GIMPLE_TRANSACTION:
dump_gimple_transaction (buffer, gs, spc, flags);
dump_gimple_transaction (buffer, as_a <gtransaction *> (gs), spc,
flags);
break;
default:
@ -2312,11 +2333,11 @@ dump_gimple_bb_footer (FILE *outf ATTRIBUTE_UNUSED,
static void
dump_phi_nodes (pretty_printer *buffer, basic_block bb, int indent, int flags)
{
gimple_stmt_iterator i;
gphi_iterator i;
for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
{
gimple phi = gsi_stmt (i);
gphi *phi = i.phi ();
if (!virtual_operand_p (gimple_phi_result (phi)) || (flags & TDF_VOPS))
{
INDENT (indent);
@ -2343,7 +2364,9 @@ pp_cfg_jump (pretty_printer *buffer, basic_block bb)
if (stmt && gimple_code (stmt) == GIMPLE_LABEL)
{
pp_string (buffer, " (");
dump_generic_node (buffer, gimple_label_label (stmt), 0, 0, false);
dump_generic_node (buffer,
gimple_label_label (as_a <glabel *> (stmt)),
0, 0, false);
pp_right_paren (buffer);
pp_semicolon (buffer);
}
@ -2465,14 +2488,13 @@ gimple_dump_bb (FILE *file, basic_block bb, int indent, int flags)
void
gimple_dump_bb_for_graph (pretty_printer *pp, basic_block bb)
{
gimple_stmt_iterator gsi;
pp_printf (pp, "<bb %d>:\n", bb->index);
pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/true);
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
if (!virtual_operand_p (gimple_phi_result (phi))
|| (dump_flags & TDF_VOPS))
{
@ -2485,7 +2507,8 @@ gimple_dump_bb_for_graph (pretty_printer *pp, basic_block bb)
}
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
pp_bar (pp);
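
The pp_gimple_stmt_1 hunks above all follow one pattern: the dispatcher keeps its plain gimple parameter, switches on gimple_code, and only then passes a checked as_a <...> downcast to the dump helper, whose parameter is now the concrete subclass. A minimal sketch of that shape follows, assuming it is compiled against GCC's internal headers like any gcc/*.c file; dump_my_phi and dump_any_stmt are invented names standing in for the real dump_gimple_* routines.

/* Sketch only: a subclass-typed helper plus a dispatcher that downcasts,
   mirroring the dump_gimple_* changes above.  */

static void
dump_my_phi (pretty_printer *buffer, gphi *phi, int spc, int flags)
{
  /* A gphi * parameter means gphi-only accessors need no further checks.  */
  dump_generic_node (buffer, gimple_phi_result (phi), spc, flags, false);
}

static void
dump_any_stmt (pretty_printer *buffer, gimple gs, int spc, int flags)
{
  switch (gimple_code (gs))
    {
    case GIMPLE_PHI:
      /* The code check above guarantees the cast; as_a asserts it in
         checking builds.  */
      dump_my_phi (buffer, as_a <gphi *> (gs), spc, flags);
      break;
    default:
      break;
    }
}
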

@ -107,7 +107,7 @@ insert_trap_and_remove_trailing_statements (gimple_stmt_iterator *si_p, tree op)
update_stmt (stmt);
}
gimple new_stmt
gcall *new_stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
gimple_seq seq = NULL;
gimple_seq_add_stmt (&seq, new_stmt);
@ -216,7 +216,7 @@ isolate_path (basic_block bb, basic_block duplicate,
{
if (ret_zero)
{
gimple ret = gsi_stmt (si2);
greturn *ret = as_a <greturn *> (gsi_stmt (si2));
tree zero = build_zero_cst (TREE_TYPE (gimple_return_retval (ret)));
gimple_return_set_retval (ret, zero);
update_stmt (ret);
@ -243,7 +243,7 @@ find_implicit_erroneous_behaviour (void)
FOR_EACH_BB_FN (bb, cfun)
{
gimple_stmt_iterator si;
gphi_iterator si;
/* Out of an abundance of caution, do not isolate paths to a
block where the block has any abnormal outgoing edges.
@ -262,7 +262,7 @@ find_implicit_erroneous_behaviour (void)
cases. */
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
{
gimple phi = gsi_stmt (si);
gphi *phi = si.phi ();
tree lhs = gimple_phi_result (phi);
/* If the result is not a pointer, then there is no need to
@ -296,8 +296,12 @@ find_implicit_erroneous_behaviour (void)
{
FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
{
if (gimple_code (use_stmt) != GIMPLE_RETURN
|| gimple_return_retval (use_stmt) != lhs)
greturn *return_stmt
= dyn_cast <greturn *> (use_stmt);
if (!return_stmt)
continue;
if (gimple_return_retval (return_stmt) != lhs)
continue;
if (warning_at (gimple_location (use_stmt),
@ -410,9 +414,9 @@ find_explicit_erroneous_behaviour (void)
/* Detect returning the address of a local variable. This only
becomes undefined behavior if the result is used, so we do not
insert a trap and only return NULL instead. */
if (gimple_code (stmt) == GIMPLE_RETURN)
if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
tree val = gimple_return_retval (stmt);
tree val = gimple_return_retval (return_stmt);
if (val && TREE_CODE (val) == ADDR_EXPR)
{
tree valbase = get_base_address (TREE_OPERAND (val, 0));
@ -436,7 +440,7 @@ find_explicit_erroneous_behaviour (void)
OPT_Wreturn_local_addr, msg))
inform (DECL_SOURCE_LOCATION(valbase), "declared here");
tree zero = build_zero_cst (TREE_TYPE (val));
gimple_return_set_retval (stmt, zero);
gimple_return_set_retval (return_stmt, zero);
update_stmt (stmt);
}
}
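
The isolate-paths hunks above replace gimple_code (stmt) == GIMPLE_RETURN checks with dyn_cast <greturn *>, so the retval accessors only ever see a greturn *. Below is a condensed, hedged sketch of that idiom; zero_out_return_value is an invented name, while build_zero_cst and update_stmt are used exactly as in the hunk.

/* Sketch of the dyn_cast idiom used above: test and downcast in one step,
   then use return-specific accessors on the typed pointer.  */
static void
zero_out_return_value (gimple stmt)
{
  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree val = gimple_return_retval (return_stmt);
      if (val == NULL_TREE)
        return;
      gimple_return_set_retval (return_stmt,
                                build_zero_cst (TREE_TYPE (val)));
      update_stmt (return_stmt);
    }
}
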

@ -753,7 +753,7 @@ add_cand_for_stmt (gimple gs, slsr_cand_t c)
is used to help find a basis for subsequent candidates. */
static void
slsr_process_phi (gimple phi, bool speed)
slsr_process_phi (gphi *phi, bool speed)
{
unsigned i;
tree arg0_base = NULL_TREE, base_type;
@ -1678,12 +1678,13 @@ void
find_candidates_dom_walker::before_dom_children (basic_block bb)
{
bool speed = optimize_bb_for_speed_p (bb);
gimple_stmt_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
slsr_process_phi (gsi_stmt (gsi), speed);
for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
slsr_process_phi (gsi.phi (), speed);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple gs = gsi_stmt (gsi);
@ -2063,7 +2064,7 @@ replace_mult_candidate (slsr_cand_t c, tree basis_name, widest_int bump)
if (bump == 0)
{
tree lhs = gimple_assign_lhs (c->cand_stmt);
gimple copy_stmt = gimple_build_assign (lhs, basis_name);
gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
gsi_replace (&gsi, copy_stmt, false);
@ -2162,7 +2163,7 @@ create_add_on_incoming_edge (slsr_cand_t c, tree basis_name,
basic_block insert_bb;
gimple_stmt_iterator gsi;
tree lhs, basis_type;
gimple new_stmt;
gassign *new_stmt;
/* If the add candidate along this incoming edge has the same
index as C's hidden basis, the hidden basis represents this
@ -2246,7 +2247,7 @@ create_phi_basis (slsr_cand_t c, gimple from_phi, tree basis_name,
{
int i;
tree name, phi_arg;
gimple phi;
gphi *phi;
vec<tree> phi_args;
slsr_cand_t basis = lookup_cand (c->basis);
int nargs = gimple_phi_num_args (from_phi);
@ -2981,7 +2982,7 @@ ncd_for_two_cands (basic_block bb1, basic_block bb2,
candidates, return the earliest candidate in the block in *WHERE. */
static basic_block
ncd_with_phi (slsr_cand_t c, const widest_int &incr, gimple phi,
ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
basic_block ncd, slsr_cand_t *where)
{
unsigned i;
@ -2997,7 +2998,8 @@ ncd_with_phi (slsr_cand_t c, const widest_int &incr, gimple phi,
gimple arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
ncd = ncd_with_phi (c, incr, arg_def, ncd, where);
ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
where);
else
{
slsr_cand_t arg_cand = base_cand_from_table (arg);
@ -3031,7 +3033,8 @@ ncd_of_cand_and_phis (slsr_cand_t c, const widest_int &incr, slsr_cand_t *where)
}
if (phi_dependent_cand_p (c))
ncd = ncd_with_phi (c, incr, lookup_cand (c->def_phi)->cand_stmt,
ncd = ncd_with_phi (c, incr,
as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
ncd, where);
return ncd;
@ -3119,7 +3122,7 @@ insert_initializers (slsr_cand_t c)
{
basic_block bb;
slsr_cand_t where = NULL;
gimple init_stmt;
gassign *init_stmt;
tree stride_type, new_name, incr_tree;
widest_int incr = incr_vec[i].incr;
@ -3257,7 +3260,7 @@ static tree
introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
{
tree cast_lhs;
gimple cast_stmt;
gassign *cast_stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
@ -3418,7 +3421,7 @@ replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
if (types_compatible_p (lhs_type, basis_type))
{
gimple copy_stmt = gimple_build_assign (lhs, basis_name);
gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
gsi_replace (&gsi, copy_stmt, false);
@ -3430,8 +3433,8 @@ replace_one_candidate (slsr_cand_t c, unsigned i, tree basis_name)
else
{
gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
gimple cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
basis_name);
gassign *cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
basis_name);
gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
gsi_replace (&gsi, cast_stmt, false);
c->cand_stmt = cast_stmt;
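
Both dump_phi_nodes earlier and find_candidates_dom_walker above swap gimple_stmt_iterator plus gsi_stmt for gphi_iterator plus .phi (), which yields a gphi * directly. A stand-alone sketch of that loop follows; count_real_phis is a made-up helper, but the iterator and accessor calls are the ones from the hunks.

/* Hypothetical helper: count the non-virtual PHI nodes in BB using the
   typed PHI iterator.  */
static unsigned
count_real_phis (basic_block bb)
{
  unsigned count = 0;
  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      if (!virtual_operand_p (gimple_phi_result (phi)))
        count++;
    }
  return count;
}
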

@ -59,14 +59,14 @@ along with GCC; see the file COPYING3. If not see
/* Read a PHI function for basic block BB in function FN. DATA_IN is
the file being read. IB is the input block to use for reading. */
static gimple
static gphi *
input_phi (struct lto_input_block *ib, basic_block bb, struct data_in *data_in,
struct function *fn)
{
unsigned HOST_WIDE_INT ix;
tree phi_result;
int i, len;
gimple result;
gphi *result;
ix = streamer_read_uhwi (ib);
phi_result = (*SSANAMES (fn))[ix];
@ -138,21 +138,25 @@ input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
switch (code)
{
case GIMPLE_RESX:
gimple_resx_set_region (stmt, streamer_read_hwi (ib));
gimple_resx_set_region (as_a <gresx *> (stmt),
streamer_read_hwi (ib));
break;
case GIMPLE_EH_MUST_NOT_THROW:
gimple_eh_must_not_throw_set_fndecl (stmt, stream_read_tree (ib, data_in));
gimple_eh_must_not_throw_set_fndecl (
as_a <geh_mnt *> (stmt),
stream_read_tree (ib, data_in));
break;
case GIMPLE_EH_DISPATCH:
gimple_eh_dispatch_set_region (stmt, streamer_read_hwi (ib));
gimple_eh_dispatch_set_region (as_a <geh_dispatch *> (stmt),
streamer_read_hwi (ib));
break;
case GIMPLE_ASM:
{
/* FIXME lto. Move most of this into a new gimple_asm_set_string(). */
gimple_statement_asm *asm_stmt = as_a <gimple_statement_asm *> (stmt);
gasm *asm_stmt = as_a <gasm *> (stmt);
tree str;
asm_stmt->ni = streamer_read_uhwi (ib);
asm_stmt->no = streamer_read_uhwi (ib);
@ -200,13 +204,13 @@ input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
== TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0))))
*opp = TREE_OPERAND (TREE_OPERAND (*opp, 0), 0);
}
if (is_gimple_call (stmt))
if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
if (gimple_call_internal_p (stmt))
if (gimple_call_internal_p (call_stmt))
gimple_call_set_internal_fn
(stmt, streamer_read_enum (ib, internal_fn, IFN_LAST));
(call_stmt, streamer_read_enum (ib, internal_fn, IFN_LAST));
else
gimple_call_set_fntype (stmt, stream_read_tree (ib, data_in));
gimple_call_set_fntype (call_stmt, stream_read_tree (ib, data_in));
}
break;
@ -215,7 +219,8 @@ input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
break;
case GIMPLE_TRANSACTION:
gimple_transaction_set_label (stmt, stream_read_tree (ib, data_in));
gimple_transaction_set_label (as_a <gtransaction *> (stmt),
stream_read_tree (ib, data_in));
break;
default:
@ -233,11 +238,12 @@ input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
}
else if (code == GIMPLE_ASM)
{
gasm *asm_stmt = as_a <gasm *> (stmt);
unsigned i;
for (i = 0; i < gimple_asm_noutputs (stmt); i++)
for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
{
tree op = TREE_VALUE (gimple_asm_output_op (stmt, i));
tree op = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
if (TREE_CODE (op) == SSA_NAME)
SSA_NAME_DEF_STMT (op) = stmt;
}
@ -245,7 +251,7 @@ input_gimple_stmt (struct lto_input_block *ib, struct data_in *data_in,
/* Reset alias information. */
if (code == GIMPLE_CALL)
gimple_call_reset_alias_info (stmt);
gimple_call_reset_alias_info (as_a <gcall *> (stmt));
/* Mark the statement modified so its operand vectors can be filled in. */
gimple_set_modified (stmt, true);
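
The streamer hunks use both cast forms: as_a where the switch on the statement code already guarantees the subclass, and dyn_cast where the cast doubles as the test. A small contrast sketch follows; stmt_region_or_zero is an invented name, and the region accessors are taken from the hunks.

/* as_a <T *> presumes the cast is valid (checking builds assert it);
   dyn_cast <T *> returns NULL on mismatch, so it also serves as the test.  */
static int
stmt_region_or_zero (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_RESX)
    /* Code already checked, so an asserting cast is appropriate.  */
    return gimple_resx_region (as_a <gresx *> (stmt));

  if (geh_dispatch *eh_dispatch_stmt = dyn_cast <geh_dispatch *> (stmt))
    /* Test and cast in one step.  */
    return gimple_eh_dispatch_region (eh_dispatch_stmt);

  return 0;
}
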

@ -54,7 +54,7 @@ along with GCC; see the file COPYING3. If not see
/* Output PHI function PHI to the main stream in OB. */
static void
output_phi (struct output_block *ob, gimple phi)
output_phi (struct output_block *ob, gphi *phi)
{
unsigned i, len = gimple_phi_num_args (phi);
@ -93,7 +93,10 @@ output_gimple_stmt (struct output_block *ob, gimple stmt)
bp_pack_var_len_unsigned (&bp, gimple_num_ops (stmt));
bp_pack_value (&bp, gimple_no_warning_p (stmt), 1);
if (is_gimple_assign (stmt))
bp_pack_value (&bp, gimple_assign_nontemporal_move_p (stmt), 1);
bp_pack_value (&bp,
gimple_assign_nontemporal_move_p (
as_a <gassign *> (stmt)),
1);
bp_pack_value (&bp, gimple_has_volatile_ops (stmt), 1);
hist = gimple_histogram_value (cfun, stmt);
bp_pack_value (&bp, hist != NULL, 1);
@ -110,24 +113,32 @@ output_gimple_stmt (struct output_block *ob, gimple stmt)
switch (gimple_code (stmt))
{
case GIMPLE_RESX:
streamer_write_hwi (ob, gimple_resx_region (stmt));
streamer_write_hwi (ob, gimple_resx_region (as_a <gresx *> (stmt)));
break;
case GIMPLE_EH_MUST_NOT_THROW:
stream_write_tree (ob, gimple_eh_must_not_throw_fndecl (stmt), true);
stream_write_tree (ob,
gimple_eh_must_not_throw_fndecl (
as_a <geh_mnt *> (stmt)),
true);
break;
case GIMPLE_EH_DISPATCH:
streamer_write_hwi (ob, gimple_eh_dispatch_region (stmt));
streamer_write_hwi (ob,
gimple_eh_dispatch_region (
as_a <geh_dispatch *> (stmt)));
break;
case GIMPLE_ASM:
streamer_write_uhwi (ob, gimple_asm_ninputs (stmt));
streamer_write_uhwi (ob, gimple_asm_noutputs (stmt));
streamer_write_uhwi (ob, gimple_asm_nclobbers (stmt));
streamer_write_uhwi (ob, gimple_asm_nlabels (stmt));
streamer_write_string (ob, ob->main_stream, gimple_asm_string (stmt),
true);
{
gasm *asm_stmt = as_a <gasm *> (stmt);
streamer_write_uhwi (ob, gimple_asm_ninputs (asm_stmt));
streamer_write_uhwi (ob, gimple_asm_noutputs (asm_stmt));
streamer_write_uhwi (ob, gimple_asm_nclobbers (asm_stmt));
streamer_write_uhwi (ob, gimple_asm_nlabels (asm_stmt));
streamer_write_string (ob, ob->main_stream,
gimple_asm_string (asm_stmt), true);
}
/* Fallthru */
case GIMPLE_ASSIGN:
@ -187,8 +198,11 @@ output_gimple_stmt (struct output_block *ob, gimple stmt)
break;
case GIMPLE_TRANSACTION:
gcc_assert (gimple_transaction_body (stmt) == NULL);
stream_write_tree (ob, gimple_transaction_label (stmt), true);
{
gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
gcc_assert (gimple_transaction_body (trans_stmt) == NULL);
stream_write_tree (ob, gimple_transaction_label (trans_stmt), true);
}
break;
default:
@ -240,9 +254,11 @@ output_bb (struct output_block *ob, basic_block bb, struct function *fn)
streamer_write_record_start (ob, LTO_null);
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gphi_iterator psi = gsi_start_phis (bb);
!gsi_end_p (psi);
gsi_next (&psi))
{
gimple phi = gsi_stmt (bsi);
gphi *phi = psi.phi ();
/* Only emit PHIs for gimple registers. PHI nodes for .MEM
will be filled in on reading when the SSA form is

@ -102,7 +102,7 @@ walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
/* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
walk_gimple_asm (gasm *stmt, walk_tree_fn callback_op,
struct walk_stmt_info *wi)
{
tree ret, op;
@ -244,7 +244,8 @@ walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
wi->val_only = true;
}
ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
ret = walk_tree (gimple_call_chain_ptr (as_a <gcall *> (stmt)),
callback_op, wi, pset);
if (ret)
return ret;
@ -285,8 +286,8 @@ walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
break;
case GIMPLE_CATCH:
ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
pset);
ret = walk_tree (gimple_catch_types_ptr (as_a <gcatch *> (stmt)),
callback_op, wi, pset);
if (ret)
return ret;
break;
@ -299,26 +300,30 @@ walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
break;
case GIMPLE_ASM:
ret = walk_gimple_asm (stmt, callback_op, wi);
ret = walk_gimple_asm (as_a <gasm *> (stmt), callback_op, wi);
if (ret)
return ret;
break;
case GIMPLE_OMP_CONTINUE:
ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
callback_op, wi, pset);
if (ret)
return ret;
{
gomp_continue *cont_stmt = as_a <gomp_continue *> (stmt);
ret = walk_tree (gimple_omp_continue_control_def_ptr (cont_stmt),
callback_op, wi, pset);
if (ret)
return ret;
ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
callback_op, wi, pset);
if (ret)
return ret;
ret = walk_tree (gimple_omp_continue_control_use_ptr (cont_stmt),
callback_op, wi, pset);
if (ret)
return ret;
}
break;
case GIMPLE_OMP_CRITICAL:
ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
pset);
ret = walk_tree (gimple_omp_critical_name_ptr (
as_a <gomp_critical *> (stmt)),
callback_op, wi, pset);
if (ret)
return ret;
break;
@ -350,18 +355,21 @@ walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
break;
case GIMPLE_OMP_PARALLEL:
ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
wi, pset);
if (ret)
return ret;
ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
wi, pset);
if (ret)
return ret;
ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
wi, pset);
if (ret)
return ret;
{
gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
ret = walk_tree (gimple_omp_parallel_clauses_ptr (omp_par_stmt),
callback_op, wi, pset);
if (ret)
return ret;
ret = walk_tree (gimple_omp_parallel_child_fn_ptr (omp_par_stmt),
callback_op, wi, pset);
if (ret)
return ret;
ret = walk_tree (gimple_omp_parallel_data_arg_ptr (omp_par_stmt),
callback_op, wi, pset);
if (ret)
return ret;
}
break;
case GIMPLE_OMP_TASK:
@ -426,27 +434,32 @@ walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
break;
case GIMPLE_OMP_ATOMIC_LOAD:
ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
pset);
if (ret)
return ret;
{
gomp_atomic_load *omp_stmt = as_a <gomp_atomic_load *> (stmt);
ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (omp_stmt),
callback_op, wi, pset);
if (ret)
return ret;
ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
pset);
if (ret)
return ret;
ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (omp_stmt),
callback_op, wi, pset);
if (ret)
return ret;
}
break;
case GIMPLE_OMP_ATOMIC_STORE:
ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
wi, pset);
ret = walk_tree (gimple_omp_atomic_store_val_ptr (
as_a <gomp_atomic_store *> (stmt)),
callback_op, wi, pset);
if (ret)
return ret;
break;
case GIMPLE_TRANSACTION:
ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
wi, pset);
ret = walk_tree (gimple_transaction_label_ptr (
as_a <gtransaction *> (stmt)),
callback_op, wi, pset);
if (ret)
return ret;
break;
@ -549,15 +562,16 @@ walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
switch (gimple_code (stmt))
{
case GIMPLE_BIND:
ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
callback_op, wi);
ret = walk_gimple_seq_mod (gimple_bind_body_ptr (as_a <gbind *> (stmt)),
callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
break;
case GIMPLE_CATCH:
ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
callback_op, wi);
ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (
as_a <gcatch *> (stmt)),
callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
break;
@ -570,14 +584,17 @@ walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
break;
case GIMPLE_EH_ELSE:
ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
{
geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (eh_else_stmt),
callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (eh_else_stmt),
callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
}
break;
case GIMPLE_TRY:
@ -624,7 +641,8 @@ walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
break;
case GIMPLE_TRANSACTION:
ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (
as_a <gtransaction *> (stmt)),
callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
@ -747,11 +765,11 @@ walk_stmt_load_store_addr_ops (gimple stmt, void *data,
}
}
}
else if (is_gimple_call (stmt))
else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
if (visit_store)
{
tree arg = gimple_call_lhs (stmt);
tree arg = gimple_call_lhs (call_stmt);
if (arg)
{
tree lhs = get_base_loadstore (arg);
@ -760,9 +778,9 @@ walk_stmt_load_store_addr_ops (gimple stmt, void *data,
}
}
if (visit_load || visit_addr)
for (i = 0; i < gimple_call_num_args (stmt); ++i)
for (i = 0; i < gimple_call_num_args (call_stmt); ++i)
{
tree arg = gimple_call_arg (stmt, i);
tree arg = gimple_call_arg (call_stmt, i);
if (visit_addr
&& TREE_CODE (arg) == ADDR_EXPR)
ret |= visit_addr (stmt, TREE_OPERAND (arg, 0), arg, data);
@ -774,29 +792,29 @@ walk_stmt_load_store_addr_ops (gimple stmt, void *data,
}
}
if (visit_addr
&& gimple_call_chain (stmt)
&& TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
gimple_call_chain (stmt), data);
&& gimple_call_chain (call_stmt)
&& TREE_CODE (gimple_call_chain (call_stmt)) == ADDR_EXPR)
ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (call_stmt), 0),
gimple_call_chain (call_stmt), data);
if (visit_addr
&& gimple_call_return_slot_opt_p (stmt)
&& gimple_call_lhs (stmt) != NULL_TREE
&& TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
ret |= visit_addr (stmt, gimple_call_lhs (stmt),
gimple_call_lhs (stmt), data);
&& gimple_call_return_slot_opt_p (call_stmt)
&& gimple_call_lhs (call_stmt) != NULL_TREE
&& TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call_stmt))))
ret |= visit_addr (stmt, gimple_call_lhs (call_stmt),
gimple_call_lhs (call_stmt), data);
}
else if (gimple_code (stmt) == GIMPLE_ASM)
else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
{
unsigned noutputs;
const char *constraint;
const char **oconstraints;
bool allows_mem, allows_reg, is_inout;
noutputs = gimple_asm_noutputs (stmt);
noutputs = gimple_asm_noutputs (asm_stmt);
oconstraints = XALLOCAVEC (const char *, noutputs);
if (visit_store || visit_addr)
for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
{
tree link = gimple_asm_output_op (stmt, i);
tree link = gimple_asm_output_op (asm_stmt, i);
tree op = get_base_loadstore (TREE_VALUE (link));
if (op && visit_store)
ret |= visit_store (stmt, op, TREE_VALUE (link), data);
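
Once walk_stmt_load_store_addr_ops above has a gasm * from dyn_cast, every operand count and operand is read through asm-specific accessors. Here is a reduced sketch of that output/input walk; collect_asm_operands is an invented name, and a caller would typically pass the address of an auto_vec<tree> like the one in the gimple_call_copy_skip_args hunk further down.

/* Hypothetical helper: push every asm output and input operand onto *OPS.  */
static void
collect_asm_operands (gasm *asm_stmt, vec<tree> *ops)
{
  for (unsigned i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
    ops->safe_push (TREE_VALUE (gimple_asm_output_op (asm_stmt, i)));
  for (unsigned i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
    ops->safe_push (TREE_VALUE (gimple_asm_input_op (asm_stmt, i)));
}
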
@ -812,9 +830,9 @@ walk_stmt_load_store_addr_ops (gimple stmt, void *data,
}
}
if (visit_load || visit_addr)
for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
{
tree link = gimple_asm_input_op (stmt, i);
tree link = gimple_asm_input_op (asm_stmt, i);
tree op = TREE_VALUE (link);
if (visit_addr
&& TREE_CODE (op) == ADDR_EXPR)
@ -841,9 +859,9 @@ walk_stmt_load_store_addr_ops (gimple stmt, void *data,
}
}
}
else if (gimple_code (stmt) == GIMPLE_RETURN)
else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
tree op = gimple_return_retval (stmt);
tree op = gimple_return_retval (return_stmt);
if (op)
{
if (visit_addr

@ -191,10 +191,12 @@ gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
/* Build a GIMPLE_RETURN statement returning RETVAL. */
gimple
greturn *
gimple_build_return (tree retval)
{
gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 2);
greturn *s
= as_a <greturn *> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK,
2));
if (retval)
gimple_return_set_retval (s, retval);
return s;
@ -203,7 +205,7 @@ gimple_build_return (tree retval)
/* Reset alias information on call S. */
void
gimple_call_reset_alias_info (gimple s)
gimple_call_reset_alias_info (gcall *s)
{
if (gimple_call_flags (s) & ECF_CONST)
memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
@ -220,10 +222,12 @@ gimple_call_reset_alias_info (gimple s)
components of a GIMPLE_CALL statement to function FN with NARGS
arguments. */
static inline gimple
static inline gcall *
gimple_build_call_1 (tree fn, unsigned nargs)
{
gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
gcall *s
= as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
nargs + 3));
if (TREE_CODE (fn) == FUNCTION_DECL)
fn = build_fold_addr_expr (fn);
gimple_set_op (s, 1, fn);
@ -236,12 +240,12 @@ gimple_build_call_1 (tree fn, unsigned nargs)
/* Build a GIMPLE_CALL statement to function FN with the arguments
specified in vector ARGS. */
gimple
gcall *
gimple_build_call_vec (tree fn, vec<tree> args)
{
unsigned i;
unsigned nargs = args.length ();
gimple call = gimple_build_call_1 (fn, nargs);
gcall *call = gimple_build_call_1 (fn, nargs);
for (i = 0; i < nargs; i++)
gimple_call_set_arg (call, i, args[i]);
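
With gimple_build_call and friends returning gcall * directly, the result can feed call-specific code without a cast; the trap insertion in the isolate-paths hunk near the top is a real user. A short hedged sketch of the same pattern, where build_trap_seq is an invented wrapper:

/* Hypothetical wrapper: build a one-statement sequence that calls
   __builtin_trap, relying on gimple_build_call returning gcall *.  */
static gimple_seq
build_trap_seq (void)
{
  gcall *call
    = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
  gimple_seq seq = NULL;
  gimple_seq_add_stmt (&seq, call);
  return seq;
}
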
@ -253,11 +257,11 @@ gimple_build_call_vec (tree fn, vec<tree> args)
/* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
arguments. The ... are the arguments. */
gimple
gcall *
gimple_build_call (tree fn, unsigned nargs, ...)
{
va_list ap;
gimple call;
gcall *call;
unsigned i;
gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
@ -276,10 +280,10 @@ gimple_build_call (tree fn, unsigned nargs, ...)
/* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
arguments. AP contains the arguments. */
gimple
gcall *
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
gimple call;
gcall *call;
unsigned i;
gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
@ -297,10 +301,12 @@ gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
Build the basic components of a GIMPLE_CALL statement to internal
function FN with NARGS arguments. */
static inline gimple
static inline gcall *
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
gcall *s
= as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
nargs + 3));
s->subcode |= GF_CALL_INTERNAL;
gimple_call_set_internal_fn (s, fn);
gimple_call_reset_alias_info (s);
@ -311,11 +317,11 @@ gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
/* Build a GIMPLE_CALL statement to internal function FN. NARGS is
the number of arguments. The ... are the arguments. */
gimple
gcall *
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
va_list ap;
gimple call;
gcall *call;
unsigned i;
call = gimple_build_call_internal_1 (fn, nargs);
@ -331,11 +337,11 @@ gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
/* Build a GIMPLE_CALL statement to internal function FN with the arguments
specified in vector ARGS. */
gimple
gcall *
gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
{
unsigned i, nargs;
gimple call;
gcall *call;
nargs = args.length ();
call = gimple_build_call_internal_1 (fn, nargs);
@ -350,11 +356,11 @@ gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
assumed to be in GIMPLE form already. Minimal checking is done of
this fact. */
gimple
gcall *
gimple_build_call_from_tree (tree t)
{
unsigned i, nargs;
gimple call;
gcall *call;
tree fndecl = get_callee_fndecl (t);
gcc_assert (TREE_CODE (t) == CALL_EXPR);
@ -392,7 +398,7 @@ gimple_build_call_from_tree (tree t)
LHS of the assignment.
RHS of the assignment which can be unary or binary. */
gimple
gassign *
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
enum tree_code subcode;
@ -408,19 +414,20 @@ gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
gimple
gassign *
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
tree op2, tree op3 MEM_STAT_DECL)
{
unsigned num_ops;
gimple p;
gassign *p;
/* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
code). */
num_ops = get_gimple_rhs_num_ops (subcode) + 1;
p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
PASS_MEM_STAT);
p = as_a <gassign *> (
gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
PASS_MEM_STAT));
gimple_assign_set_lhs (p, lhs);
gimple_assign_set_rhs1 (p, op1);
if (op2)
@ -438,7 +445,7 @@ gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
return p;
}
gimple
gassign *
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
tree op2 MEM_STAT_DECL)
{
@ -446,7 +453,7 @@ gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
PASS_MEM_STAT);
}
gimple
gassign *
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1
MEM_STAT_DECL)
{
@ -461,14 +468,14 @@ gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1
T_LABEL is the label to jump to if the condition is true.
F_LABEL is the label to jump to otherwise. */
gimple
gcond *
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
tree t_label, tree f_label)
{
gimple p;
gcond *p;
gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
p = as_a <gcond *> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4));
gimple_cond_set_lhs (p, lhs);
gimple_cond_set_rhs (p, rhs);
gimple_cond_set_true_label (p, t_label);
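
gimple_build_cond now hands back a gcond *, so condition-specific setters and users need no cast on the result. A minimal sketch, assuming VAL is an already-gimplified value and the two labels exist; build_nonzero_test is an invented name.

/* Hypothetical helper: build "if (val != 0) goto t_label; else goto f_label".  */
static gcond *
build_nonzero_test (tree val, tree t_label, tree f_label)
{
  return gimple_build_cond (NE_EXPR, val,
                            build_zero_cst (TREE_TYPE (val)),
                            t_label, f_label);
}
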
@ -479,7 +486,7 @@ gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
/* Build a GIMPLE_COND statement from the conditional expression tree
COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
gimple
gcond *
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
enum tree_code code;
@ -493,7 +500,7 @@ gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
boolean expression tree COND. */
void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
gimple_cond_set_condition_from_tree (gcond *stmt, tree cond)
{
enum tree_code code;
tree lhs, rhs;
@ -504,20 +511,22 @@ gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
/* Build a GIMPLE_LABEL statement for LABEL. */
gimple
glabel *
gimple_build_label (tree label)
{
gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
glabel *p
= as_a <glabel *> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1));
gimple_label_set_label (p, label);
return p;
}
/* Build a GIMPLE_GOTO statement to label DEST. */
gimple
ggoto *
gimple_build_goto (tree dest)
{
gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
ggoto *p
= as_a <ggoto *> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1));
gimple_goto_set_dest (p, dest);
return p;
}
@ -536,10 +545,10 @@ gimple_build_nop (void)
VARS are the variables in BODY.
BLOCK is the containing block. */
gimple
gbind *
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
gimple p = gimple_alloc (GIMPLE_BIND, 0);
gbind *p = as_a <gbind *> (gimple_alloc (GIMPLE_BIND, 0));
gimple_bind_set_vars (p, vars);
if (body)
gimple_bind_set_body (p, body);
@ -556,18 +565,18 @@ gimple_build_bind (tree vars, gimple_seq body, tree block)
NCLOBBERS is the number of clobbered registers.
*/
static inline gimple
static inline gasm *
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
unsigned nclobbers, unsigned nlabels)
{
gimple_statement_asm *p;
gasm *p;
int size = strlen (string);
/* ASMs with labels cannot have outputs. This should have been
enforced by the front end. */
gcc_assert (nlabels == 0 || noutputs == 0);
p = as_a <gimple_statement_asm *> (
p = as_a <gasm *> (
gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
ninputs + noutputs + nclobbers + nlabels));
@ -594,12 +603,12 @@ gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
CLOBBERS is a vector of the clobbered register parameters.
LABELS is a vector of destination labels. */
gimple
gasm *
gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
vec<tree, va_gc> *labels)
{
gimple p;
gasm *p;
unsigned i;
p = gimple_build_asm_1 (string,
@ -628,10 +637,10 @@ gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
TYPES are the catch types.
HANDLER is the exception handler. */
gimple
gcatch *
gimple_build_catch (tree types, gimple_seq handler)
{
gimple p = gimple_alloc (GIMPLE_CATCH, 0);
gcatch *p = as_a <gcatch *> (gimple_alloc (GIMPLE_CATCH, 0));
gimple_catch_set_types (p, types);
if (handler)
gimple_catch_set_handler (p, handler);
@ -644,10 +653,10 @@ gimple_build_catch (tree types, gimple_seq handler)
TYPES are the filter's types.
FAILURE is the filter's failure action. */
gimple
geh_filter *
gimple_build_eh_filter (tree types, gimple_seq failure)
{
gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
geh_filter *p = as_a <geh_filter *> (gimple_alloc (GIMPLE_EH_FILTER, 0));
gimple_eh_filter_set_types (p, types);
if (failure)
gimple_eh_filter_set_failure (p, failure);
@ -657,10 +666,10 @@ gimple_build_eh_filter (tree types, gimple_seq failure)
/* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
gimple
geh_mnt *
gimple_build_eh_must_not_throw (tree decl)
{
gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
geh_mnt *p = as_a <geh_mnt *> (gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0));
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
@ -671,10 +680,10 @@ gimple_build_eh_must_not_throw (tree decl)
/* Build a GIMPLE_EH_ELSE statement. */
gimple
geh_else *
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
geh_else *p = as_a <geh_else *> (gimple_alloc (GIMPLE_EH_ELSE, 0));
gimple_eh_else_set_n_body (p, n_body);
gimple_eh_else_set_e_body (p, e_body);
return p;
@ -687,14 +696,14 @@ gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
whether this is a try/catch or a try/finally respectively. */
gimple_statement_try *
gtry *
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
enum gimple_try_flags kind)
{
gimple_statement_try *p;
gtry *p;
gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
p = as_a <gimple_statement_try *> (gimple_alloc (GIMPLE_TRY, 0));
p = as_a <gtry *> (gimple_alloc (GIMPLE_TRY, 0));
gimple_set_subcode (p, kind);
if (eval)
gimple_try_set_eval (p, eval);
@ -721,12 +730,11 @@ gimple_build_wce (gimple_seq cleanup)
/* Build a GIMPLE_RESX statement. */
gimple
gresx *
gimple_build_resx (int region)
{
gimple_statement_resx *p =
as_a <gimple_statement_resx *> (
gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
gresx *p
= as_a <gresx *> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
p->region = region;
return p;
}
@ -737,13 +745,14 @@ gimple_build_resx (int region)
NLABELS is the number of labels in the switch excluding the default.
DEFAULT_LABEL is the default label for the switch statement. */
gimple
gswitch *
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
/* nlabels + 1 default label + 1 index. */
gcc_checking_assert (default_label);
gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
1 + 1 + nlabels);
gswitch *p = as_a <gswitch *> (gimple_build_with_ops (GIMPLE_SWITCH,
ERROR_MARK,
1 + 1 + nlabels));
gimple_switch_set_index (p, index);
gimple_switch_set_default_label (p, default_label);
return p;
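
gimple_build_switch_nlabels and gimple_build_switch both return gswitch * now, which the hunk below shows by assigning one builder's result straight to the other's return type. The sketch here only wraps the vector-taking builder; build_switch_for is an invented name, and the gimple_switch_num_labels assertion (labels plus the default) relies on an accessor not shown in these hunks.

/* Hypothetical wrapper: build a GIMPLE_SWITCH on INDEX from an existing
   vector of CASE_LABEL_EXPRs; DEFAULT_LABEL is the mandatory default.  */
static gswitch *
build_switch_for (tree index, tree default_label, vec<tree> cases)
{
  gswitch *p = gimple_build_switch (index, default_label, cases);
  /* Assumed accessor: the label count includes the default label.  */
  gcc_checking_assert (gimple_switch_num_labels (p) == cases.length () + 1);
  return p;
}
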
@ -755,12 +764,12 @@ gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
DEFAULT_LABEL is the default label
ARGS is a vector of labels excluding the default. */
gimple
gswitch *
gimple_build_switch (tree index, tree default_label, vec<tree> args)
{
unsigned i, nlabels = args.length ();
gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
gswitch *p = gimple_build_switch_nlabels (nlabels, index, default_label);
/* Copy the labels from the vector to the switch statement. */
for (i = 0; i < nlabels; i++)
@ -771,12 +780,12 @@ gimple_build_switch (tree index, tree default_label, vec<tree> args)
/* Build a GIMPLE_EH_DISPATCH statement. */
gimple
geh_dispatch *
gimple_build_eh_dispatch (int region)
{
gimple_statement_eh_dispatch *p =
as_a <gimple_statement_eh_dispatch *> (
gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
geh_dispatch *p
= as_a <geh_dispatch *> (
gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
p->region = region;
return p;
}
@ -785,13 +794,13 @@ gimple_build_eh_dispatch (int region)
VAR is bound to VALUE; block and location are taken from STMT. */
gimple
gdebug *
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
(unsigned)GIMPLE_DEBUG_BIND, 2
PASS_MEM_STAT);
gdebug *p
= as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
(unsigned)GIMPLE_DEBUG_BIND, 2
PASS_MEM_STAT));
gimple_debug_bind_set_var (p, var);
gimple_debug_bind_set_value (p, value);
if (stmt)
@ -805,13 +814,15 @@ gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
VAR is bound to VALUE; block and location are taken from STMT. */
gimple
gdebug *
gimple_build_debug_source_bind_stat (tree var, tree value,
gimple stmt MEM_STAT_DECL)
{
gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
(unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
PASS_MEM_STAT);
gdebug *p
= as_a <gdebug *> (
gimple_build_with_ops_stat (GIMPLE_DEBUG,
(unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
PASS_MEM_STAT));
gimple_debug_source_bind_set_var (p, var);
gimple_debug_source_bind_set_value (p, value);
@ -827,10 +838,11 @@ gimple_build_debug_source_bind_stat (tree var, tree value,
BODY is the sequence of statements for which only one thread can execute.
NAME is an optional identifier for this critical block. */
gimple
gomp_critical *
gimple_build_omp_critical (gimple_seq body, tree name)
{
gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
gomp_critical *p
= as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
gimple_omp_critical_set_name (p, name);
if (body)
gimple_omp_set_body (p, body);
@ -847,12 +859,11 @@ gimple_build_omp_critical (gimple_seq body, tree name)
COLLAPSE is the collapse count.
PRE_BODY is the sequence of statements that are loop invariant. */
gimple
gomp_for *
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
gimple_seq pre_body)
{
gimple_statement_omp_for *p =
as_a <gimple_statement_omp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
gomp_for *p = as_a <gomp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_for_set_clauses (p, clauses);
@ -874,11 +885,12 @@ gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
CHILD_FN is the function created for the parallel threads to execute.
DATA_ARG are the shared data argument(s). */
gimple
gomp_parallel *
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
tree data_arg)
{
gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
gomp_parallel *p
= as_a <gomp_parallel *> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_parallel_set_clauses (p, clauses);
@ -898,12 +910,12 @@ gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
COPY_FN is the optional function for firstprivate initialization.
ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
gimple
gomp_task *
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
tree data_arg, tree copy_fn, tree arg_size,
tree arg_align)
{
gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
gomp_task *p = as_a <gomp_task *> (gimple_alloc (GIMPLE_OMP_TASK, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_task_set_clauses (p, clauses);
@ -968,10 +980,11 @@ gimple_build_omp_taskgroup (gimple_seq body)
CONTROL_DEF is the definition of the control variable.
CONTROL_USE is the use of the control variable. */
gimple
gomp_continue *
gimple_build_omp_continue (tree control_def, tree control_use)
{
gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
gomp_continue *p
= as_a <gomp_continue *> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0));
gimple_omp_continue_set_control_def (p, control_def);
gimple_omp_continue_set_control_use (p, control_use);
return p;
@ -1013,10 +1026,11 @@ gimple_build_omp_return (bool wait_p)
CLAUSES are any of the OMP sections construct's clauses: private,
firstprivate, lastprivate, reduction, and nowait. */
gimple
gomp_sections *
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
gomp_sections *p
= as_a <gomp_sections *> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_sections_set_clauses (p, clauses);
@ -1040,10 +1054,11 @@ gimple_build_omp_sections_switch (void)
CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
copyprivate, nowait. */
gimple
gomp_single *
gimple_build_omp_single (gimple_seq body, tree clauses)
{
gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
gomp_single *p
= as_a <gomp_single *> (gimple_alloc (GIMPLE_OMP_SINGLE, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_single_set_clauses (p, clauses);
@ -1057,10 +1072,11 @@ gimple_build_omp_single (gimple_seq body, tree clauses)
BODY is the sequence of statements that will be executed.
CLAUSES are any of the OMP target construct's clauses. */
gimple
gomp_target *
gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
{
gimple p = gimple_alloc (GIMPLE_OMP_TARGET, 0);
gomp_target *p
= as_a <gomp_target *> (gimple_alloc (GIMPLE_OMP_TARGET, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_target_set_clauses (p, clauses);
@ -1075,10 +1091,10 @@ gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
BODY is the sequence of statements that will be executed.
CLAUSES are any of the OMP teams construct's clauses. */
gimple
gomp_teams *
gimple_build_omp_teams (gimple_seq body, tree clauses)
{
gimple p = gimple_alloc (GIMPLE_OMP_TEAMS, 0);
gomp_teams *p = as_a <gomp_teams *> (gimple_alloc (GIMPLE_OMP_TEAMS, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_teams_set_clauses (p, clauses);
@ -1089,10 +1105,11 @@ gimple_build_omp_teams (gimple_seq body, tree clauses)
/* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
gimple
gomp_atomic_load *
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
gomp_atomic_load *p
= as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
gimple_omp_atomic_load_set_lhs (p, lhs);
gimple_omp_atomic_load_set_rhs (p, rhs);
return p;
@ -1102,20 +1119,22 @@ gimple_build_omp_atomic_load (tree lhs, tree rhs)
VAL is the value we are storing. */
gimple
gomp_atomic_store *
gimple_build_omp_atomic_store (tree val)
{
gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
gomp_atomic_store *p
= as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
gimple_omp_atomic_store_set_val (p, val);
return p;
}
/* Build a GIMPLE_TRANSACTION statement. */
gimple
gtransaction *
gimple_build_transaction (gimple_seq body, tree label)
{
gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
gtransaction *p
= as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0));
gimple_transaction_set_body (p, body);
gimple_transaction_set_label (p, label);
return p;
@ -1283,8 +1302,8 @@ empty_stmt_p (gimple stmt)
{
if (gimple_code (stmt) == GIMPLE_NOP)
return true;
if (gimple_code (stmt) == GIMPLE_BIND)
return empty_body_p (gimple_bind_body (stmt));
if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
return empty_body_p (gimple_bind_body (bind_stmt));
return false;
}
@ -1366,7 +1385,7 @@ gimple_call_flags (const_gimple stmt)
/* Return the "fn spec" string for call STMT. */
static const_tree
gimple_call_fnspec (const_gimple stmt)
gimple_call_fnspec (const gcall *stmt)
{
tree type, attr;
@ -1387,7 +1406,7 @@ gimple_call_fnspec (const_gimple stmt)
/* Detects argument flags for argument number ARG on call STMT. */
int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
gimple_call_arg_flags (const gcall *stmt, unsigned arg)
{
const_tree attr = gimple_call_fnspec (stmt);
@ -1421,7 +1440,7 @@ gimple_call_arg_flags (const_gimple stmt, unsigned arg)
/* Detects return flags for the call STMT. */
int
gimple_call_return_flags (const_gimple stmt)
gimple_call_return_flags (const gcall *stmt)
{
const_tree attr;
@ -1513,7 +1532,7 @@ gimple_set_bb (gimple stmt, basic_block bb)
tree t;
int uid;
t = gimple_label_label (stmt);
t = gimple_label_label (as_a <glabel *> (stmt));
uid = LABEL_DECL_UID (t);
if (uid == -1)
{
@ -1662,38 +1681,60 @@ gimple_copy (gimple stmt)
switch (gimple_code (stmt))
{
case GIMPLE_BIND:
new_seq = gimple_seq_copy (gimple_bind_body (stmt));
gimple_bind_set_body (copy, new_seq);
gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
gimple_bind_set_block (copy, gimple_bind_block (stmt));
{
gbind *bind_stmt = as_a <gbind *> (stmt);
gbind *bind_copy = as_a <gbind *> (copy);
new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt));
gimple_bind_set_body (bind_copy, new_seq);
gimple_bind_set_vars (bind_copy,
unshare_expr (gimple_bind_vars (bind_stmt)));
gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt));
}
break;
case GIMPLE_CATCH:
new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
gimple_catch_set_handler (copy, new_seq);
t = unshare_expr (gimple_catch_types (stmt));
gimple_catch_set_types (copy, t);
{
gcatch *catch_stmt = as_a <gcatch *> (stmt);
gcatch *catch_copy = as_a <gcatch *> (copy);
new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt));
gimple_catch_set_handler (catch_copy, new_seq);
t = unshare_expr (gimple_catch_types (catch_stmt));
gimple_catch_set_types (catch_copy, t);
}
break;
case GIMPLE_EH_FILTER:
new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
gimple_eh_filter_set_failure (copy, new_seq);
t = unshare_expr (gimple_eh_filter_types (stmt));
gimple_eh_filter_set_types (copy, t);
{
geh_filter *eh_filter_stmt = as_a <geh_filter *> (stmt);
geh_filter *eh_filter_copy = as_a <geh_filter *> (copy);
new_seq
= gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt));
gimple_eh_filter_set_failure (eh_filter_copy, new_seq);
t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt));
gimple_eh_filter_set_types (eh_filter_copy, t);
}
break;
case GIMPLE_EH_ELSE:
new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
gimple_eh_else_set_n_body (copy, new_seq);
new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
gimple_eh_else_set_e_body (copy, new_seq);
{
geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
geh_else *eh_else_copy = as_a <geh_else *> (copy);
new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt));
gimple_eh_else_set_n_body (eh_else_copy, new_seq);
new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt));
gimple_eh_else_set_e_body (eh_else_copy, new_seq);
}
break;
case GIMPLE_TRY:
new_seq = gimple_seq_copy (gimple_try_eval (stmt));
gimple_try_set_eval (copy, new_seq);
new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
gimple_try_set_cleanup (copy, new_seq);
{
gtry *try_stmt = as_a <gtry *> (stmt);
gtry *try_copy = as_a <gtry *> (copy);
new_seq = gimple_seq_copy (gimple_try_eval (try_stmt));
gimple_try_set_eval (try_copy, new_seq);
new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt));
gimple_try_set_cleanup (try_copy, new_seq);
}
break;
case GIMPLE_OMP_FOR:
@ -1702,8 +1743,7 @@ gimple_copy (gimple stmt)
t = unshare_expr (gimple_omp_for_clauses (stmt));
gimple_omp_for_set_clauses (copy, t);
{
gimple_statement_omp_for *omp_for_copy =
as_a <gimple_statement_omp_for *> (copy);
gomp_for *omp_for_copy = as_a <gomp_for *> (copy);
omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
( gimple_omp_for_collapse (stmt));
}
@ -1723,12 +1763,16 @@ gimple_copy (gimple stmt)
goto copy_omp_body;
case GIMPLE_OMP_PARALLEL:
t = unshare_expr (gimple_omp_parallel_clauses (stmt));
gimple_omp_parallel_set_clauses (copy, t);
t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
gimple_omp_parallel_set_child_fn (copy, t);
t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
gimple_omp_parallel_set_data_arg (copy, t);
{
gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
gomp_parallel *omp_par_copy = as_a <gomp_parallel *> (copy);
t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt));
gimple_omp_parallel_set_clauses (omp_par_copy, t);
t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt));
gimple_omp_parallel_set_child_fn (omp_par_copy, t);
t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt));
gimple_omp_parallel_set_data_arg (omp_par_copy, t);
}
goto copy_omp_body;
case GIMPLE_OMP_TASK:
@ -1747,8 +1791,9 @@ gimple_copy (gimple stmt)
goto copy_omp_body;
case GIMPLE_OMP_CRITICAL:
t = unshare_expr (gimple_omp_critical_name (stmt));
gimple_omp_critical_set_name (copy, t);
t = unshare_expr (gimple_omp_critical_name (
as_a <gomp_critical *> (stmt)));
gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t);
goto copy_omp_body;
case GIMPLE_OMP_SECTIONS:
@ -1771,8 +1816,10 @@ gimple_copy (gimple stmt)
break;
case GIMPLE_TRANSACTION:
new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
gimple_transaction_set_body (copy, new_seq);
new_seq = gimple_seq_copy (gimple_transaction_body (
as_a <gtransaction *> (stmt)));
gimple_transaction_set_body (as_a <gtransaction *> (copy),
new_seq);
break;
case GIMPLE_WITH_CLEANUP_EXPR:
@ -1827,7 +1874,7 @@ gimple_has_side_effects (const_gimple s)
return true;
if (gimple_code (s) == GIMPLE_ASM
&& gimple_asm_volatile_p (s))
&& gimple_asm_volatile_p (as_a <const gasm *> (s)))
return true;
if (is_gimple_call (s))
@ -1868,7 +1915,7 @@ gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
switch (gimple_code (s))
{
case GIMPLE_ASM:
return gimple_asm_volatile_p (s);
return gimple_asm_volatile_p (as_a <gasm *> (s));
case GIMPLE_CALL:
t = gimple_call_fndecl (s);
@ -2041,13 +2088,13 @@ canonicalize_cond_expr_cond (tree t)
/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
the positions marked by the set ARGS_TO_SKIP. */
gimple
gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
gcall *
gimple_call_copy_skip_args (gcall *stmt, bitmap args_to_skip)
{
int i;
int nargs = gimple_call_num_args (stmt);
auto_vec<tree> vargs (nargs);
gimple new_stmt;
gcall *new_stmt;
for (i = 0; i < nargs; i++)
if (!bitmap_bit_p (args_to_skip, i))
@ -2493,7 +2540,7 @@ gimple_call_builtin_p (const_gimple stmt, enum built_in_function code)
GIMPLE_ASM. */
bool
gimple_asm_clobbers_memory_p (const_gimple stmt)
gimple_asm_clobbers_memory_p (const gasm *stmt)
{
unsigned i;
@ -2652,13 +2699,13 @@ infer_nonnull_range (gimple stmt, tree op, bool dereference, bool attribute)
/* If this function is marked as returning non-null, then we can
infer OP is non-null if it is used in the return statement. */
if (attribute
&& gimple_code (stmt) == GIMPLE_RETURN
&& gimple_return_retval (stmt)
&& operand_equal_p (gimple_return_retval (stmt), op, 0)
&& lookup_attribute ("returns_nonnull",
TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
return true;
if (attribute)
if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
if (gimple_return_retval (return_stmt)
&& operand_equal_p (gimple_return_retval (return_stmt), op, 0)
&& lookup_attribute ("returns_nonnull",
TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
return true;
return false;
}
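The pattern running through the gimple.c hunks above is mechanical: an untyped gimple plus a gimple_code check becomes either a checked as_a <> cast (where the statement kind is already established) or a dyn_cast <> test-and-cast (where it is not). Below is a minimal sketch of that idiom, assuming GCC's internal headers (gimple.h, is-a.h); the helper name is hypothetical and not part of the patch.

/* Illustrative sketch only, not from the patch.  */
static tree
returned_value_or_null (gimple stmt)
{
  /* dyn_cast yields NULL when STMT is not a GIMPLE_RETURN, replacing
     the old gimple_code (stmt) == GIMPLE_RETURN test.  */
  if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    return gimple_return_retval (return_stmt);
  return NULL_TREE;
}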

File diff suppressed because it is too large.


@ -177,22 +177,27 @@ gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
switch (gimple_code (stmt))
{
case GIMPLE_COND:
gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
is_gimple_val, fb_rvalue);
gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
is_gimple_val, fb_rvalue);
{
gcond *cond_stmt = as_a <gcond *> (stmt);
gimplify_expr (gimple_cond_lhs_ptr (cond_stmt), &pre, NULL,
is_gimple_val, fb_rvalue);
gimplify_expr (gimple_cond_rhs_ptr (cond_stmt), &pre, NULL,
is_gimple_val, fb_rvalue);
}
break;
case GIMPLE_SWITCH:
gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
is_gimple_val, fb_rvalue);
gimplify_expr (gimple_switch_index_ptr (as_a <gswitch *> (stmt)),
&pre, NULL, is_gimple_val, fb_rvalue);
break;
case GIMPLE_OMP_ATOMIC_LOAD:
gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
is_gimple_val, fb_rvalue);
gimplify_expr (gimple_omp_atomic_load_rhs_ptr (
as_a <gomp_atomic_load *> (stmt)),
&pre, NULL, is_gimple_val, fb_rvalue);
break;
case GIMPLE_ASM:
{
size_t i, noutputs = gimple_asm_noutputs (stmt);
gasm *asm_stmt = as_a <gasm *> (stmt);
size_t i, noutputs = gimple_asm_noutputs (asm_stmt);
const char *constraint, **oconstraints;
bool allows_mem, allows_reg, is_inout;
@ -200,7 +205,7 @@ gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
= (const char **) alloca ((noutputs) * sizeof (const char *));
for (i = 0; i < noutputs; i++)
{
tree op = gimple_asm_output_op (stmt, i);
tree op = gimple_asm_output_op (asm_stmt, i);
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
oconstraints[i] = constraint;
parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
@ -209,9 +214,9 @@ gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
is_inout ? is_gimple_min_lval : is_gimple_lvalue,
fb_lvalue | fb_mayfail);
}
for (i = 0; i < gimple_asm_ninputs (stmt); i++)
for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
{
tree op = gimple_asm_input_op (stmt, i);
tree op = gimple_asm_input_op (asm_stmt, i);
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
parse_input_constraint (&constraint, 0, 0, noutputs, 0,
oconstraints, &allows_mem, &allows_reg);


@ -124,7 +124,7 @@ struct gimplify_ctx
{
struct gimplify_ctx *prev_context;
vec<gimple> bind_expr_stack;
vec<gbind *> bind_expr_stack;
tree temps;
gimple_seq conditional_cleanups;
tree exit_label;
@ -275,10 +275,10 @@ pop_gimplify_context (gimple body)
/* Push a GIMPLE_BIND tuple onto the stack of bindings. */
static void
gimple_push_bind_expr (gimple gimple_bind)
gimple_push_bind_expr (gbind *bind_stmt)
{
gimplify_ctxp->bind_expr_stack.reserve (8);
gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
/* Pop the first element off the stack of bindings. */
@ -291,7 +291,7 @@ gimple_pop_bind_expr (void)
/* Return the first element of the stack of bindings. */
gimple
gbind *
gimple_current_bind_expr (void)
{
return gimplify_ctxp->bind_expr_stack.last ();
@ -299,7 +299,7 @@ gimple_current_bind_expr (void)
/* Return the stack of bindings created during gimplification. */
vec<gimple>
vec<gbind *>
gimple_bind_expr_stack (void)
{
return gimplify_ctxp->bind_expr_stack;
@ -576,14 +576,14 @@ get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
generate debug info for them; otherwise don't. */
void
declare_vars (tree vars, gimple scope, bool debug_info)
declare_vars (tree vars, gimple gs, bool debug_info)
{
tree last = vars;
if (last)
{
tree temps, block;
gcc_assert (gimple_code (scope) == GIMPLE_BIND);
gbind *scope = as_a <gbind *> (gs);
temps = nreverse (last);
@ -1033,7 +1033,7 @@ voidify_wrapper_expr (tree wrapper, tree temp)
a temporary through which they communicate. */
static void
build_stack_save_restore (gimple *save, gimple *restore)
build_stack_save_restore (gcall **save, gcall **restore)
{
tree tmp_var;
@ -1054,9 +1054,9 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
tree bind_expr = *expr_p;
bool old_save_stack = gimplify_ctxp->save_stack;
tree t;
gimple gimple_bind;
gbind *bind_stmt;
gimple_seq body, cleanup;
gimple stack_save;
gcall *stack_save;
location_t start_locus = 0, end_locus = 0;
tree temp = voidify_wrapper_expr (bind_expr, NULL);
@ -1099,16 +1099,16 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
DECL_GIMPLE_REG_P (t) = 1;
}
gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
BIND_EXPR_BLOCK (bind_expr));
gimple_push_bind_expr (gimple_bind);
gimple_push_bind_expr (bind_stmt);
gimplify_ctxp->save_stack = false;
/* Gimplify the body into the GIMPLE_BIND tuple's body. */
body = NULL;
gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
gimple_bind_set_body (gimple_bind, body);
gimple_bind_set_body (bind_stmt, body);
/* Source location wise, the cleanup code (stack_restore and clobbers)
belongs to the end of the block, so propagate what we have. The
@ -1127,7 +1127,7 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
stack_save = NULL;
if (gimplify_ctxp->save_stack)
{
gimple stack_restore;
gcall *stack_restore;
/* Save stack on entry and restore it on exit. Add a try_finally
block to achieve this. */
@ -1164,23 +1164,23 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
if (cleanup)
{
gimple gs;
gtry *gs;
gimple_seq new_body;
new_body = NULL;
gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
GIMPLE_TRY_FINALLY);
if (stack_save)
gimplify_seq_add_stmt (&new_body, stack_save);
gimplify_seq_add_stmt (&new_body, gs);
gimple_bind_set_body (gimple_bind, new_body);
gimple_bind_set_body (bind_stmt, new_body);
}
gimplify_ctxp->save_stack = old_save_stack;
gimple_pop_bind_expr ();
gimplify_seq_add_stmt (pre_p, gimple_bind);
gimplify_seq_add_stmt (pre_p, bind_stmt);
if (temp)
{
@ -1202,7 +1202,7 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
gimple ret;
greturn *ret;
tree ret_expr = TREE_OPERAND (stmt, 0);
tree result_decl, result;
@ -1222,7 +1222,7 @@ gimplify_return_expr (tree stmt, gimple_seq *pre_p)
|| TREE_CODE (ret_expr) == RESULT_DECL
|| ret_expr == error_mark_node)
{
gimple ret = gimple_build_return (ret_expr);
greturn *ret = gimple_build_return (ret_expr);
gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
gimplify_seq_add_stmt (pre_p, ret);
return GS_ALL_DONE;
@ -1495,7 +1495,7 @@ gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
vec<tree> labels;
vec<tree> saved_labels;
tree default_case = NULL_TREE;
gimple gimple_switch;
gswitch *switch_stmt;
/* If someone can be bothered to fill in the labels, they can
be bothered to null out the body too. */
@ -1515,7 +1515,7 @@ gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
if (!default_case)
{
gimple new_default;
glabel *new_default;
default_case
= build_case_label (NULL_TREE, NULL_TREE,
@ -1524,9 +1524,9 @@ gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
gimplify_seq_add_stmt (&switch_body_seq, new_default);
}
gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
default_case, labels);
gimplify_seq_add_stmt (pre_p, gimple_switch);
gimplify_seq_add_stmt (pre_p, switch_stmt);
gimplify_seq_add_seq (pre_p, switch_body_seq);
labels.release ();
}
@ -1542,7 +1542,7 @@ static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
struct gimplify_ctx *ctxp;
gimple gimple_label;
glabel *label_stmt;
/* Invalid OpenMP programs can play Duff's Device type games with
#pragma omp parallel. At least in the C front end, we don't
@ -1551,9 +1551,9 @@ gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
if (ctxp->case_labels.exists ())
break;
gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
ctxp->case_labels.safe_push (*expr_p);
gimplify_seq_add_stmt (pre_p, gimple_label);
gimplify_seq_add_stmt (pre_p, label_stmt);
return GS_ALL_DONE;
}
@ -2267,7 +2267,7 @@ gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
tree fndecl, parms, p, fnptrtype;
enum gimplify_status ret;
int i, nargs;
gimple call;
gcall *call;
bool builtin_va_start_p = false;
location_t loc = EXPR_LOCATION (*expr_p);
@ -2990,7 +2990,7 @@ gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
enum gimplify_status ret;
tree label_true, label_false, label_cont;
bool have_then_clause_p, have_else_clause_p;
gimple gimple_cond;
gcond *cond_stmt;
enum tree_code pred_code;
gimple_seq seq = NULL;
@ -3139,10 +3139,10 @@ gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
&arm2);
gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
label_false);
gimplify_seq_add_stmt (&seq, gimple_cond);
gimplify_seq_add_stmt (&seq, cond_stmt);
label_cont = NULL_TREE;
if (!have_then_clause_p)
{
@ -3238,7 +3238,7 @@ gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
gimple_seq *seq_p)
{
tree t, to, to_ptr, from, from_ptr;
gimple gs;
gcall *gs;
location_t loc = EXPR_LOCATION (*expr_p);
to = TREE_OPERAND (*expr_p, 0);
@ -3285,7 +3285,7 @@ gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
gimple_seq *seq_p)
{
tree t, from, to, to_ptr;
gimple gs;
gcall *gs;
location_t loc = EXPR_LOCATION (*expr_p);
/* Assert our assumptions, to abort instead of producing wrong code
@ -4101,7 +4101,7 @@ gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
{
tree lhs = TREE_OPERAND (*expr_p, 0);
tree rhs = TREE_OPERAND (*expr_p, 1);
gimple init = gimple_build_assign (lhs, rhs);
gassign *init = gimple_build_assign (lhs, rhs);
gimplify_seq_add_stmt (pre_p, init);
*expr_p = NULL;
}
@ -4662,6 +4662,7 @@ gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
{
/* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
instead of a GIMPLE_ASSIGN. */
gcall *call_stmt;
if (CALL_EXPR_FN (*from_p) == NULL_TREE)
{
/* Gimplify internal functions created in the FEs. */
@ -4675,8 +4676,8 @@ gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
EXPR_LOCATION (*from_p));
vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
}
assign = gimple_build_call_internal_vec (ifn, vargs);
gimple_set_location (assign, EXPR_LOCATION (*expr_p));
call_stmt = gimple_build_call_internal_vec (ifn, vargs);
gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
}
else
{
@ -4688,19 +4689,20 @@ gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
&& DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
&& call_expr_nargs (*from_p) == 3)
assign = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
CALL_EXPR_ARG (*from_p, 0),
CALL_EXPR_ARG (*from_p, 1),
CALL_EXPR_ARG (*from_p, 2));
call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
CALL_EXPR_ARG (*from_p, 0),
CALL_EXPR_ARG (*from_p, 1),
CALL_EXPR_ARG (*from_p, 2));
else
{
assign = gimple_build_call_from_tree (*from_p);
gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
call_stmt = gimple_build_call_from_tree (*from_p);
gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
}
}
notice_special_calls (assign);
if (!gimple_call_noreturn_p (assign))
gimple_call_set_lhs (assign, *to_p);
notice_special_calls (call_stmt);
if (!gimple_call_noreturn_p (call_stmt))
gimple_call_set_lhs (call_stmt, *to_p);
assign = call_stmt;
}
else
{
@ -4979,7 +4981,7 @@ gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
const char *constraint;
bool allows_mem, allows_reg, is_inout;
enum gimplify_status ret, tret;
gimple stmt;
gasm *stmt;
vec<tree, va_gc> *inputs;
vec<tree, va_gc> *outputs;
vec<tree, va_gc> *clobbers;
@ -5269,7 +5271,7 @@ gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
}
else
{
gimple_statement_try *gtry;
gtry *gtry;
gimple_seq seq;
enum gimple_try_flags kind;
@ -5341,8 +5343,8 @@ gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
val
*/
tree flag = create_tmp_var (boolean_type_node, "cleanup");
gimple ffalse = gimple_build_assign (flag, boolean_false_node);
gimple ftrue = gimple_build_assign (flag, boolean_true_node);
gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
gimplify_stmt (&cleanup, &cleanup_stmts);
@ -6769,7 +6771,7 @@ gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
tree for_stmt, orig_for_stmt, decl, var, t;
enum gimplify_status ret = GS_ALL_DONE;
enum gimplify_status tret;
gimple gfor;
gomp_for *gfor;
gimple_seq for_body, for_pre_body;
int i;
bool simd;
@ -7241,7 +7243,7 @@ static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p;
gimple stmt;
gomp_target *stmt;
gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
ORT_WORKSHARE);
@ -7369,7 +7371,8 @@ gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
? NULL : TREE_OPERAND (*expr_p, 1);
tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
tree tmp_load;
gimple loadstmt, storestmt;
gomp_atomic_load *loadstmt;
gomp_atomic_store *storestmt;
tmp_load = create_tmp_reg (type, NULL);
if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
@ -7420,7 +7423,8 @@ static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
gimple g;
gimple body_stmt;
gtransaction *trans_stmt;
gimple_seq body = NULL;
int subcode = 0;
@ -7437,17 +7441,17 @@ gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
push_gimplify_context ();
temp = voidify_wrapper_expr (*expr_p, NULL);
g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
pop_gimplify_context (g);
body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
pop_gimplify_context (body_stmt);
g = gimple_build_transaction (body, NULL);
trans_stmt = gimple_build_transaction (body, NULL);
if (TRANSACTION_EXPR_OUTER (expr))
subcode = GTMA_IS_OUTER;
else if (TRANSACTION_EXPR_RELAXED (expr))
subcode = GTMA_IS_RELAXED;
gimple_transaction_set_subcode (g, subcode);
gimple_transaction_set_subcode (trans_stmt, subcode);
gimplify_seq_add_stmt (pre_p, g);
gimplify_seq_add_stmt (pre_p, trans_stmt);
if (temp)
{
@ -7789,7 +7793,7 @@ gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
}
tree tmp = create_tmp_var (type, NULL);
gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
gimple call
gcall *call
= gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
gimple_call_set_lhs (call, tmp);
gimplify_seq_add_stmt (pre_p, call);
@ -8052,7 +8056,7 @@ gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
case TRY_CATCH_EXPR:
{
gimple_seq eval, cleanup;
gimple try_;
gtry *try_;
/* Calls to destructors are generated automatically in FINALLY/CATCH
block. They should have location as UNKNOWN_LOCATION. However,
@ -8791,12 +8795,13 @@ gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
containing the sequence of corresponding GIMPLE statements. If DO_PARMS
is true, also gimplify the parameters. */
gimple
gbind *
gimplify_body (tree fndecl, bool do_parms)
{
location_t saved_location = input_location;
gimple_seq parm_stmts, seq;
gimple outer_bind;
gimple outer_stmt;
gbind *outer_bind;
struct cgraph_node *cgn;
timevar_push (TV_TREE_GIMPLIFY);
@ -8836,18 +8841,18 @@ gimplify_body (tree fndecl, bool do_parms)
/* Gimplify the function's body. */
seq = NULL;
gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
outer_bind = gimple_seq_first_stmt (seq);
if (!outer_bind)
outer_stmt = gimple_seq_first_stmt (seq);
if (!outer_stmt)
{
outer_bind = gimple_build_nop ();
gimplify_seq_add_stmt (&seq, outer_bind);
outer_stmt = gimple_build_nop ();
gimplify_seq_add_stmt (&seq, outer_stmt);
}
/* The body must contain exactly one statement, a GIMPLE_BIND. If this is
not the case, wrap everything in a GIMPLE_BIND to make it so. */
if (gimple_code (outer_bind) == GIMPLE_BIND
if (gimple_code (outer_stmt) == GIMPLE_BIND
&& gimple_seq_first (seq) == gimple_seq_last (seq))
;
outer_bind = as_a <gbind *> (outer_stmt);
else
outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
@ -8961,7 +8966,7 @@ gimplify_function_tree (tree fndecl)
{
tree parm, ret;
gimple_seq seq;
gimple bind;
gbind *bind;
gcc_assert (!gimple_body (fndecl));
@ -9005,11 +9010,11 @@ gimplify_function_tree (tree fndecl)
&& !flag_instrument_functions_exclude_p (fndecl))
{
tree x;
gimple new_bind;
gbind *new_bind;
gimple tf;
gimple_seq cleanup = NULL, body = NULL;
tree tmp_var;
gimple call;
gcall *call;
x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
call = gimple_build_call (x, 1, integer_zero_node);
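As the gimplify hunks above show, the statement builders now return the concrete statement classes (gimple_build_assign yields a gassign *, gimple_build_call a gcall *, and so on), so locals keep their precise type and feed the typed setters without casts. A hedged sketch under those assumptions; the function below is illustrative only and does not appear in the patch.

/* Illustrative only; assumes GCC's internal headers.  */
static void
emit_flag_init (tree flag, gimple_seq *seq_p)
{
  /* gimple_build_assign now returns a gassign *.  */
  gassign *init = gimple_build_assign (flag, boolean_false_node);
  gimple_seq_add_stmt (seq_p, init);
}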


@ -53,8 +53,8 @@ extern void free_gimplify_stack (void);
extern void push_gimplify_context (bool in_ssa = false,
bool rhs_cond_ok = false);
extern void pop_gimplify_context (gimple);
extern gimple gimple_current_bind_expr (void);
extern vec<gimple> gimple_bind_expr_stack (void);
extern gbind *gimple_current_bind_expr (void);
extern vec<gbind *> gimple_bind_expr_stack (void);
extern void gimplify_and_add (tree, gimple_seq *);
extern tree get_formal_tmp_var (tree, gimple_seq *);
extern tree get_initialized_tmp_var (tree, gimple_seq *, gimple_seq *);
@ -76,7 +76,7 @@ extern enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
extern void gimplify_type_sizes (tree, gimple_seq *);
extern void gimplify_one_sizepos (tree *, gimple_seq *);
extern gimple gimplify_body (tree, bool);
extern gbind *gimplify_body (tree, bool);
extern enum gimplify_status gimplify_arg (tree *, gimple_seq *, location_t);
extern void gimplify_function_tree (tree);
extern enum gimplify_status gimplify_va_arg_expr (tree *, gimple_seq *,


@ -1262,7 +1262,7 @@ limit_scops (vec<scop_p> *scops)
argument. */
static inline bool
same_close_phi_node (gimple p1, gimple p2)
same_close_phi_node (gphi *p1, gphi *p2)
{
return operand_equal_p (gimple_phi_arg_def (p1, 0),
gimple_phi_arg_def (p2, 0), 0);
@ -1272,15 +1272,15 @@ same_close_phi_node (gimple p1, gimple p2)
of PHI. */
static void
remove_duplicate_close_phi (gimple phi, gimple_stmt_iterator *gsi)
remove_duplicate_close_phi (gphi *phi, gphi_iterator *gsi)
{
gimple use_stmt;
use_operand_p use_p;
imm_use_iterator imm_iter;
tree res = gimple_phi_result (phi);
tree def = gimple_phi_result (gsi_stmt (*gsi));
tree def = gimple_phi_result (gsi->phi ());
gcc_assert (same_close_phi_node (phi, gsi_stmt (*gsi)));
gcc_assert (same_close_phi_node (phi, gsi->phi ()));
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, def)
{
@ -1305,12 +1305,12 @@ remove_duplicate_close_phi (gimple phi, gimple_stmt_iterator *gsi)
static void
make_close_phi_nodes_unique (basic_block bb)
{
gimple_stmt_iterator psi;
gphi_iterator psi;
for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
{
gimple_stmt_iterator gsi = psi;
gimple phi = gsi_stmt (psi);
gphi_iterator gsi = psi;
gphi *phi = psi.phi ();
/* At this point, PHI should be a close phi in normal form. */
gcc_assert (gimple_phi_num_args (phi) == 1);
@ -1318,7 +1318,7 @@ make_close_phi_nodes_unique (basic_block bb)
/* Iterate over the next phis and remove duplicates. */
gsi_next (&gsi);
while (!gsi_end_p (gsi))
if (same_close_phi_node (phi, gsi_stmt (gsi)))
if (same_close_phi_node (phi, gsi.phi ()))
remove_duplicate_close_phi (phi, &gsi);
else
gsi_next (&gsi);
@ -1345,14 +1345,14 @@ canonicalize_loop_closed_ssa (loop_p loop)
}
else
{
gimple_stmt_iterator psi;
gphi_iterator psi;
basic_block close = split_edge (e);
e = single_succ_edge (close);
for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
{
gimple phi = gsi_stmt (psi);
gphi *phi = psi.phi ();
unsigned i;
for (i = 0; i < gimple_phi_num_args (phi); i++)
@ -1360,7 +1360,7 @@ canonicalize_loop_closed_ssa (loop_p loop)
{
tree res, arg = gimple_phi_arg_def (phi, i);
use_operand_p use_p;
gimple close_phi;
gphi *close_phi;
if (TREE_CODE (arg) != SSA_NAME)
continue;
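The graphite hunks above all apply the same conversion: iterators over PHI nodes become gphi_iterator, gsi_stmt (psi) becomes psi.phi (), and the helpers take gphi * instead of gimple. A small sketch of the resulting loop shape, assuming GCC's internal headers; the counting function itself is hypothetical.

/* Illustrative sketch, not part of the patch.  */
static unsigned
count_nonvirtual_phis (basic_block bb)
{
  unsigned n = 0;
  for (gphi_iterator psi = gsi_start_phis (bb); !gsi_end_p (psi);
       gsi_next (&psi))
    {
      gphi *phi = psi.phi ();  /* typed accessor, no cast needed */
      if (!virtual_operand_p (gimple_phi_result (phi)))
        n++;
    }
  return n;
}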


@ -100,7 +100,7 @@ tree_int_to_gmp (tree t, mpz_t res)
loop. */
static size_t
phi_arg_in_outermost_loop (gimple phi)
phi_arg_in_outermost_loop (gphi *phi)
{
loop_p loop = gimple_bb (phi)->loop_father;
size_t i, res = 0;
@ -119,13 +119,13 @@ phi_arg_in_outermost_loop (gimple phi)
PSI by inserting on the loop ENTRY edge assignment "RES = INIT". */
static void
remove_simple_copy_phi (gimple_stmt_iterator *psi)
remove_simple_copy_phi (gphi_iterator *psi)
{
gimple phi = gsi_stmt (*psi);
gphi *phi = psi->phi ();
tree res = gimple_phi_result (phi);
size_t entry = phi_arg_in_outermost_loop (phi);
tree init = gimple_phi_arg_def (phi, entry);
gimple stmt = gimple_build_assign (res, init);
gassign *stmt = gimple_build_assign (res, init);
edge e = gimple_phi_arg_edge (phi, entry);
remove_phi_node (psi, false);
@ -136,16 +136,16 @@ remove_simple_copy_phi (gimple_stmt_iterator *psi)
loop ENTRY edge the assignment RES = INIT. */
static void
remove_invariant_phi (sese region, gimple_stmt_iterator *psi)
remove_invariant_phi (sese region, gphi_iterator *psi)
{
gimple phi = gsi_stmt (*psi);
gphi *phi = psi->phi ();
loop_p loop = loop_containing_stmt (phi);
tree res = gimple_phi_result (phi);
tree scev = scalar_evolution_in_region (region, loop, res);
size_t entry = phi_arg_in_outermost_loop (phi);
edge e = gimple_phi_arg_edge (phi, entry);
tree var;
gimple stmt;
gassign *stmt;
gimple_seq stmts = NULL;
if (tree_contains_chrecs (scev, NULL))
@ -164,7 +164,7 @@ remove_invariant_phi (sese region, gimple_stmt_iterator *psi)
/* Returns true when the phi node at PSI is of the form "a = phi (a, x)". */
static inline bool
simple_copy_phi_p (gimple phi)
simple_copy_phi_p (gphi *phi)
{
tree res;
@ -181,10 +181,10 @@ simple_copy_phi_p (gimple phi)
be considered. */
static bool
reduction_phi_p (sese region, gimple_stmt_iterator *psi)
reduction_phi_p (sese region, gphi_iterator *psi)
{
loop_p loop;
gimple phi = gsi_stmt (*psi);
gphi *phi = psi->phi ();
tree res = gimple_phi_result (phi);
loop = loop_containing_stmt (phi);
@ -1128,7 +1128,7 @@ create_pw_aff_from_tree (poly_bb_p pbb, tree t)
inequalities. */
static void
add_condition_to_pbb (poly_bb_p pbb, gimple stmt, enum tree_code code)
add_condition_to_pbb (poly_bb_p pbb, gcond *stmt, enum tree_code code)
{
isl_pw_aff *lhs = create_pw_aff_from_tree (pbb, gimple_cond_lhs (stmt));
isl_pw_aff *rhs = create_pw_aff_from_tree (pbb, gimple_cond_rhs (stmt));
@ -1188,13 +1188,14 @@ add_conditions_to_domain (poly_bb_p pbb)
{
case GIMPLE_COND:
{
enum tree_code code = gimple_cond_code (stmt);
gcond *cond_stmt = as_a <gcond *> (stmt);
enum tree_code code = gimple_cond_code (cond_stmt);
/* The conditions for ELSE-branches are inverted. */
if (!GBB_CONDITION_CASES (gbb)[i])
code = invert_tree_comparison (code, false);
add_condition_to_pbb (pbb, stmt, code);
add_condition_to_pbb (pbb, cond_stmt, code);
break;
}
@ -1224,7 +1225,7 @@ add_conditions_to_constraints (scop_p scop)
edge between BB and its predecessor is not a loop exit edge, and
the last statement of the single predecessor is a COND_EXPR. */
static gimple
static gcond *
single_pred_cond_non_loop_exit (basic_block bb)
{
if (single_pred_p (bb))
@ -1239,7 +1240,7 @@ single_pred_cond_non_loop_exit (basic_block bb)
stmt = last_stmt (pred);
if (stmt && gimple_code (stmt) == GIMPLE_COND)
return stmt;
return as_a <gcond *> (stmt);
}
return NULL;
@ -1270,7 +1271,7 @@ void
sese_dom_walker::before_dom_children (basic_block bb)
{
gimple_bb_p gbb;
gimple stmt;
gcond *stmt;
if (!bb_in_sese_p (bb, m_region))
return;
@ -1936,14 +1937,14 @@ build_scop_drs (scop_p scop)
/* Return a gsi at the position of the phi node STMT. */
static gimple_stmt_iterator
gsi_for_phi_node (gimple stmt)
static gphi_iterator
gsi_for_phi_node (gphi *stmt)
{
gimple_stmt_iterator psi;
gphi_iterator psi;
basic_block bb = gimple_bb (stmt);
for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
if (stmt == gsi_stmt (psi))
if (stmt == psi.phi ())
return psi;
gcc_unreachable ();
@ -2011,7 +2012,7 @@ insert_out_of_ssa_copy (scop_p scop, tree res, tree expr, gimple after_stmt)
gimple_seq stmts;
gimple_stmt_iterator gsi;
tree var = force_gimple_operand (expr, &stmts, true, NULL_TREE);
gimple stmt = gimple_build_assign (unshare_expr (res), var);
gassign *stmt = gimple_build_assign (unshare_expr (res), var);
auto_vec<gimple, 3> x;
gimple_seq_add_stmt (&stmts, stmt);
@ -2240,10 +2241,10 @@ rewrite_close_phi_out_of_ssa (scop_p scop, gimple_stmt_iterator *psi)
dimension array for it. */
static void
rewrite_phi_out_of_ssa (scop_p scop, gimple_stmt_iterator *psi)
rewrite_phi_out_of_ssa (scop_p scop, gphi_iterator *psi)
{
size_t i;
gimple phi = gsi_stmt (*psi);
gphi *phi = psi->phi ();
basic_block bb = gimple_bb (phi);
tree res = gimple_phi_result (phi);
tree zero_dim_array = create_zero_dim_array (res, "phi_out_of_ssa");
@ -2274,12 +2275,12 @@ rewrite_phi_out_of_ssa (scop_p scop, gimple_stmt_iterator *psi)
form "x = phi (y, y, ..., y)" to "x = y". */
static void
rewrite_degenerate_phi (gimple_stmt_iterator *psi)
rewrite_degenerate_phi (gphi_iterator *psi)
{
tree rhs;
gimple stmt;
gimple_stmt_iterator gsi;
gimple phi = gsi_stmt (*psi);
gphi *phi = psi->phi ();
tree res = gimple_phi_result (phi);
basic_block bb;
@ -2300,14 +2301,14 @@ static void
rewrite_reductions_out_of_ssa (scop_p scop)
{
basic_block bb;
gimple_stmt_iterator psi;
gphi_iterator psi;
sese region = SCOP_REGION (scop);
FOR_EACH_BB_FN (bb, cfun)
if (bb_in_sese_p (bb, region))
for (psi = gsi_start_phis (bb); !gsi_end_p (psi);)
{
gimple phi = gsi_stmt (psi);
gphi *phi = psi.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
{
@ -2454,7 +2455,7 @@ rewrite_cross_bb_scalar_deps (scop_p scop, gimple_stmt_iterator *gsi)
if (gimple_code (use_stmt) == GIMPLE_PHI
&& (res = true))
{
gimple_stmt_iterator psi = gsi_for_stmt (use_stmt);
gphi_iterator psi = gsi_start_phis (gimple_bb (use_stmt));
if (scalar_close_phi_node_p (gsi_stmt (psi)))
rewrite_close_phi_out_of_ssa (scop, &psi);
@ -2623,7 +2624,7 @@ is_reduction_operation_p (gimple stmt)
/* Returns true when PHI contains an argument ARG. */
static bool
phi_contains_arg (gimple phi, tree arg)
phi_contains_arg (gphi *phi, tree arg)
{
size_t i;
@ -2636,7 +2637,7 @@ phi_contains_arg (gimple phi, tree arg)
/* Return a loop phi node that corresponds to a reduction containing LHS. */
static gimple
static gphi *
follow_ssa_with_commutative_ops (tree arg, tree lhs)
{
gimple stmt;
@ -2650,10 +2651,10 @@ follow_ssa_with_commutative_ops (tree arg, tree lhs)
|| gimple_code (stmt) == GIMPLE_CALL)
return NULL;
if (gimple_code (stmt) == GIMPLE_PHI)
if (gphi *phi = dyn_cast <gphi *> (stmt))
{
if (phi_contains_arg (stmt, lhs))
return stmt;
if (phi_contains_arg (phi, lhs))
return phi;
return NULL;
}
@ -2665,7 +2666,8 @@ follow_ssa_with_commutative_ops (tree arg, tree lhs)
if (is_reduction_operation_p (stmt))
{
gimple res = follow_ssa_with_commutative_ops (gimple_assign_rhs1 (stmt), lhs);
gphi *res
= follow_ssa_with_commutative_ops (gimple_assign_rhs1 (stmt), lhs);
return res ? res :
follow_ssa_with_commutative_ops (gimple_assign_rhs2 (stmt), lhs);
@ -2677,12 +2679,12 @@ follow_ssa_with_commutative_ops (tree arg, tree lhs)
/* Detect commutative and associative scalar reductions starting at
the STMT. Return the phi node of the reduction cycle, or NULL. */
static gimple
static gphi *
detect_commutative_reduction_arg (tree lhs, gimple stmt, tree arg,
vec<gimple> *in,
vec<gimple> *out)
{
gimple phi = follow_ssa_with_commutative_ops (arg, lhs);
gphi *phi = follow_ssa_with_commutative_ops (arg, lhs);
if (!phi)
return NULL;
@ -2695,7 +2697,7 @@ detect_commutative_reduction_arg (tree lhs, gimple stmt, tree arg,
/* Detect commutative and associative scalar reductions starting at
STMT. Return the phi node of the reduction cycle, or NULL. */
static gimple
static gphi *
detect_commutative_reduction_assign (gimple stmt, vec<gimple> *in,
vec<gimple> *out)
{
@ -2708,9 +2710,9 @@ detect_commutative_reduction_assign (gimple stmt, vec<gimple> *in,
if (is_reduction_operation_p (stmt))
{
gimple res = detect_commutative_reduction_arg (lhs, stmt,
gimple_assign_rhs1 (stmt),
in, out);
gphi *res = detect_commutative_reduction_arg (lhs, stmt,
gimple_assign_rhs1 (stmt),
in, out);
return res ? res
: detect_commutative_reduction_arg (lhs, stmt,
gimple_assign_rhs2 (stmt),
@ -2722,7 +2724,7 @@ detect_commutative_reduction_assign (gimple stmt, vec<gimple> *in,
/* Return a loop phi node that corresponds to a reduction containing LHS. */
static gimple
static gphi *
follow_inital_value_to_phi (tree arg, tree lhs)
{
gimple stmt;
@ -2732,9 +2734,9 @@ follow_inital_value_to_phi (tree arg, tree lhs)
stmt = SSA_NAME_DEF_STMT (arg);
if (gimple_code (stmt) == GIMPLE_PHI
&& phi_contains_arg (stmt, lhs))
return stmt;
if (gphi *phi = dyn_cast <gphi *> (stmt))
if (phi_contains_arg (phi, lhs))
return phi;
return NULL;
}
@ -2744,7 +2746,7 @@ follow_inital_value_to_phi (tree arg, tree lhs)
from outside the loop. */
static edge
edge_initial_value_for_loop_phi (gimple phi)
edge_initial_value_for_loop_phi (gphi *phi)
{
size_t i;
@ -2764,7 +2766,7 @@ edge_initial_value_for_loop_phi (gimple phi)
from outside the loop. */
static tree
initial_value_for_loop_phi (gimple phi)
initial_value_for_loop_phi (gphi *phi)
{
size_t i;
@ -2808,13 +2810,14 @@ used_outside_reduction (tree def, gimple loop_phi)
the SCOP starting at the loop closed phi node STMT. Return the phi
node of the reduction cycle, or NULL. */
static gimple
static gphi *
detect_commutative_reduction (scop_p scop, gimple stmt, vec<gimple> *in,
vec<gimple> *out)
{
if (scalar_close_phi_node_p (stmt))
{
gimple def, loop_phi, phi, close_phi = stmt;
gimple def;
gphi *loop_phi, *phi, *close_phi = as_a <gphi *> (stmt);
tree init, lhs, arg = gimple_phi_arg_def (close_phi, 0);
if (TREE_CODE (arg) != SSA_NAME)
@ -2854,10 +2857,10 @@ detect_commutative_reduction (scop_p scop, gimple stmt, vec<gimple> *in,
static void
translate_scalar_reduction_to_array_for_stmt (scop_p scop, tree red,
gimple stmt, gimple loop_phi)
gimple stmt, gphi *loop_phi)
{
tree res = gimple_phi_result (loop_phi);
gimple assign = gimple_build_assign (res, unshare_expr (red));
gassign *assign = gimple_build_assign (res, unshare_expr (red));
gimple_stmt_iterator gsi;
insert_stmts (scop, assign, NULL, gsi_after_labels (gimple_bb (loop_phi)));
@ -2872,7 +2875,7 @@ translate_scalar_reduction_to_array_for_stmt (scop_p scop, tree red,
the PHI_RESULT. */
static void
remove_phi (gimple phi)
remove_phi (gphi *phi)
{
imm_use_iterator imm_iter;
tree def;
@ -2935,7 +2938,7 @@ dr_indices_valid_in_loop (tree ref ATTRIBUTE_UNUSED, tree *index, void *data)
NULL_TREE. */
static tree
close_phi_written_to_memory (gimple close_phi)
close_phi_written_to_memory (gphi *close_phi)
{
imm_use_iterator imm_iter;
use_operand_p use_p;
@ -2990,30 +2993,33 @@ translate_scalar_reduction_to_array (scop_p scop,
vec<gimple> in,
vec<gimple> out)
{
gimple loop_phi;
gimple loop_stmt;
unsigned int i = out.length () - 1;
tree red = close_phi_written_to_memory (out[i]);
tree red = close_phi_written_to_memory (as_a <gphi *> (out[i]));
FOR_EACH_VEC_ELT (in, i, loop_phi)
FOR_EACH_VEC_ELT (in, i, loop_stmt)
{
gimple close_phi = out[i];
gimple close_stmt = out[i];
if (i == 0)
{
gimple stmt = loop_phi;
basic_block bb = split_reduction_stmt (scop, stmt);
basic_block bb = split_reduction_stmt (scop, loop_stmt);
poly_bb_p pbb = pbb_from_bb (bb);
PBB_IS_REDUCTION (pbb) = true;
gcc_assert (close_phi == loop_phi);
gcc_assert (close_stmt == loop_stmt);
if (!red)
red = create_zero_dim_array
(gimple_assign_lhs (stmt), "Commutative_Associative_Reduction");
(gimple_assign_lhs (loop_stmt), "Commutative_Associative_Reduction");
translate_scalar_reduction_to_array_for_stmt (scop, red, stmt, in[1]);
translate_scalar_reduction_to_array_for_stmt (scop, red, loop_stmt,
as_a <gphi *> (in[1]));
continue;
}
gphi *loop_phi = as_a <gphi *> (loop_stmt);
gphi *close_phi = as_a <gphi *> (close_stmt);
if (i == in.length () - 1)
{
insert_out_of_ssa_copy (scop, gimple_phi_result (close_phi),
@ -3033,7 +3039,7 @@ translate_scalar_reduction_to_array (scop_p scop,
static bool
rewrite_commutative_reductions_out_of_ssa_close_phi (scop_p scop,
gimple close_phi)
gphi *close_phi)
{
bool res;
auto_vec<gimple, 10> in;
@ -3054,7 +3060,7 @@ static bool
rewrite_commutative_reductions_out_of_ssa_loop (scop_p scop,
loop_p loop)
{
gimple_stmt_iterator gsi;
gphi_iterator gsi;
edge exit = single_exit (loop);
tree res;
bool changed = false;
@ -3063,11 +3069,11 @@ rewrite_commutative_reductions_out_of_ssa_loop (scop_p scop,
return false;
for (gsi = gsi_start_phis (exit->dest); !gsi_end_p (gsi); gsi_next (&gsi))
if ((res = gimple_phi_result (gsi_stmt (gsi)))
if ((res = gimple_phi_result (gsi.phi ()))
&& !virtual_operand_p (res)
&& !scev_analyzable_p (res, SCOP_REGION (scop)))
changed |= rewrite_commutative_reductions_out_of_ssa_close_phi
(scop, gsi_stmt (gsi));
(scop, gsi.phi ());
return changed;
}
@ -3104,7 +3110,7 @@ static bool
scop_ivs_can_be_represented (scop_p scop)
{
loop_p loop;
gimple_stmt_iterator psi;
gphi_iterator psi;
bool result = true;
FOR_EACH_LOOP (loop, 0)
@ -3115,7 +3121,7 @@ scop_ivs_can_be_represented (scop_p scop)
for (psi = gsi_start_phis (loop->header);
!gsi_end_p (psi); gsi_next (&psi))
{
gimple phi = gsi_stmt (psi);
gphi *phi = psi.phi ();
tree res = PHI_RESULT (phi);
tree type = TREE_TYPE (res);


@ -29,25 +29,25 @@ DEFGSSTRUCT(GSS_BASE, gimple_statement_base, false)
DEFGSSTRUCT(GSS_WITH_OPS, gimple_statement_with_ops, true)
DEFGSSTRUCT(GSS_WITH_MEM_OPS_BASE, gimple_statement_with_memory_ops_base, false)
DEFGSSTRUCT(GSS_WITH_MEM_OPS, gimple_statement_with_memory_ops, true)
DEFGSSTRUCT(GSS_CALL, gimple_statement_call, true)
DEFGSSTRUCT(GSS_ASM, gimple_statement_asm, true)
DEFGSSTRUCT(GSS_BIND, gimple_statement_bind, false)
DEFGSSTRUCT(GSS_PHI, gimple_statement_phi, false)
DEFGSSTRUCT(GSS_TRY, gimple_statement_try, false)
DEFGSSTRUCT(GSS_CATCH, gimple_statement_catch, false)
DEFGSSTRUCT(GSS_EH_FILTER, gimple_statement_eh_filter, false)
DEFGSSTRUCT(GSS_EH_MNT, gimple_statement_eh_mnt, false)
DEFGSSTRUCT(GSS_CALL, gcall, true)
DEFGSSTRUCT(GSS_ASM, gasm, true)
DEFGSSTRUCT(GSS_BIND, gbind, false)
DEFGSSTRUCT(GSS_PHI, gphi, false)
DEFGSSTRUCT(GSS_TRY, gtry, false)
DEFGSSTRUCT(GSS_CATCH, gcatch, false)
DEFGSSTRUCT(GSS_EH_FILTER, geh_filter, false)
DEFGSSTRUCT(GSS_EH_MNT, geh_mnt, false)
DEFGSSTRUCT(GSS_EH_CTRL, gimple_statement_eh_ctrl, false)
DEFGSSTRUCT(GSS_EH_ELSE, gimple_statement_eh_else, false)
DEFGSSTRUCT(GSS_EH_ELSE, geh_else, false)
DEFGSSTRUCT(GSS_WCE, gimple_statement_wce, false)
DEFGSSTRUCT(GSS_OMP, gimple_statement_omp, false)
DEFGSSTRUCT(GSS_OMP_CRITICAL, gimple_statement_omp_critical, false)
DEFGSSTRUCT(GSS_OMP_FOR, gimple_statement_omp_for, false)
DEFGSSTRUCT(GSS_OMP_CRITICAL, gomp_critical, false)
DEFGSSTRUCT(GSS_OMP_FOR, gomp_for, false)
DEFGSSTRUCT(GSS_OMP_PARALLEL_LAYOUT, gimple_statement_omp_parallel_layout, false)
DEFGSSTRUCT(GSS_OMP_TASK, gimple_statement_omp_task, false)
DEFGSSTRUCT(GSS_OMP_SECTIONS, gimple_statement_omp_sections, false)
DEFGSSTRUCT(GSS_OMP_TASK, gomp_task, false)
DEFGSSTRUCT(GSS_OMP_SECTIONS, gomp_sections, false)
DEFGSSTRUCT(GSS_OMP_SINGLE_LAYOUT, gimple_statement_omp_single_layout, false)
DEFGSSTRUCT(GSS_OMP_CONTINUE, gimple_statement_omp_continue, false)
DEFGSSTRUCT(GSS_OMP_ATOMIC_LOAD, gimple_statement_omp_atomic_load, false)
DEFGSSTRUCT(GSS_OMP_ATOMIC_STORE_LAYOUT, gimple_statement_omp_atomic_store, false)
DEFGSSTRUCT(GSS_TRANSACTION, gimple_statement_transaction, false)
DEFGSSTRUCT(GSS_OMP_CONTINUE, gomp_continue, false)
DEFGSSTRUCT(GSS_OMP_ATOMIC_LOAD, gomp_atomic_load, false)
DEFGSSTRUCT(GSS_OMP_ATOMIC_STORE_LAYOUT, gomp_atomic_store, false)
DEFGSSTRUCT(GSS_TRANSACTION, gtransaction, false)


@ -101,7 +101,7 @@ get_multi_vector_move (tree array_type, convert_optab optab)
/* Expand LOAD_LANES call STMT. */
static void
expand_LOAD_LANES (gimple stmt)
expand_LOAD_LANES (gcall *stmt)
{
struct expand_operand ops[2];
tree type, lhs, rhs;
@ -125,7 +125,7 @@ expand_LOAD_LANES (gimple stmt)
/* Expand STORE_LANES call STMT. */
static void
expand_STORE_LANES (gimple stmt)
expand_STORE_LANES (gcall *stmt)
{
struct expand_operand ops[2];
tree type, lhs, rhs;
@ -147,7 +147,7 @@ expand_STORE_LANES (gimple stmt)
}
static void
expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
expand_ANNOTATE (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
@ -155,7 +155,7 @@ expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
/* This should get expanded in adjust_simduid_builtins. */
static void
expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
expand_GOMP_SIMD_LANE (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
@ -163,7 +163,7 @@ expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
/* This should get expanded in adjust_simduid_builtins. */
static void
expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
expand_GOMP_SIMD_VF (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
@ -171,7 +171,7 @@ expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
/* This should get expanded in adjust_simduid_builtins. */
static void
expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
expand_GOMP_SIMD_LAST_LANE (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
@ -179,7 +179,7 @@ expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
/* This should get expanded in the sanopt pass. */
static void
expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
expand_UBSAN_NULL (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
@ -187,7 +187,7 @@ expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
/* This should get expanded in the sanopt pass. */
static void
expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
expand_UBSAN_BOUNDS (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
@ -195,7 +195,7 @@ expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
/* This should get expanded in the sanopt pass. */
static void
expand_UBSAN_OBJECT_SIZE (gimple stmt ATTRIBUTE_UNUSED)
expand_UBSAN_OBJECT_SIZE (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
@ -203,7 +203,7 @@ expand_UBSAN_OBJECT_SIZE (gimple stmt ATTRIBUTE_UNUSED)
/* This should get expanded in the sanopt pass. */
static void
expand_ASAN_CHECK (gimple stmt ATTRIBUTE_UNUSED)
expand_ASAN_CHECK (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
@ -1614,7 +1614,7 @@ expand_mul_overflow (location_t loc, tree lhs, tree arg0, tree arg1,
/* Expand UBSAN_CHECK_ADD call STMT. */
static void
expand_UBSAN_CHECK_ADD (gimple stmt)
expand_UBSAN_CHECK_ADD (gcall *stmt)
{
location_t loc = gimple_location (stmt);
tree lhs = gimple_call_lhs (stmt);
@ -1627,7 +1627,7 @@ expand_UBSAN_CHECK_ADD (gimple stmt)
/* Expand UBSAN_CHECK_SUB call STMT. */
static void
expand_UBSAN_CHECK_SUB (gimple stmt)
expand_UBSAN_CHECK_SUB (gcall *stmt)
{
location_t loc = gimple_location (stmt);
tree lhs = gimple_call_lhs (stmt);
@ -1643,7 +1643,7 @@ expand_UBSAN_CHECK_SUB (gimple stmt)
/* Expand UBSAN_CHECK_MUL call STMT. */
static void
expand_UBSAN_CHECK_MUL (gimple stmt)
expand_UBSAN_CHECK_MUL (gcall *stmt)
{
location_t loc = gimple_location (stmt);
tree lhs = gimple_call_lhs (stmt);
@ -1828,7 +1828,7 @@ expand_arith_overflow (enum tree_code code, gimple stmt)
/* Expand ADD_OVERFLOW STMT. */
static void
expand_ADD_OVERFLOW (gimple stmt)
expand_ADD_OVERFLOW (gcall *stmt)
{
expand_arith_overflow (PLUS_EXPR, stmt);
}
@ -1836,7 +1836,7 @@ expand_ADD_OVERFLOW (gimple stmt)
/* Expand SUB_OVERFLOW STMT. */
static void
expand_SUB_OVERFLOW (gimple stmt)
expand_SUB_OVERFLOW (gcall *stmt)
{
expand_arith_overflow (MINUS_EXPR, stmt);
}
@ -1844,7 +1844,7 @@ expand_SUB_OVERFLOW (gimple stmt)
/* Expand MUL_OVERFLOW STMT. */
static void
expand_MUL_OVERFLOW (gimple stmt)
expand_MUL_OVERFLOW (gcall *stmt)
{
expand_arith_overflow (MULT_EXPR, stmt);
}
@ -1852,13 +1852,13 @@ expand_MUL_OVERFLOW (gimple stmt)
/* This should get folded in tree-vectorizer.c. */
static void
expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
expand_LOOP_VECTORIZED (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
static void
expand_MASK_LOAD (gimple stmt)
expand_MASK_LOAD (gcall *stmt)
{
struct expand_operand ops[3];
tree type, lhs, rhs, maskt;
@ -1883,7 +1883,7 @@ expand_MASK_LOAD (gimple stmt)
}
static void
expand_MASK_STORE (gimple stmt)
expand_MASK_STORE (gcall *stmt)
{
struct expand_operand ops[3];
tree type, lhs, rhs, maskt;
@ -1906,12 +1906,12 @@ expand_MASK_STORE (gimple stmt)
}
static void
expand_ABNORMAL_DISPATCHER (gimple)
expand_ABNORMAL_DISPATCHER (gcall *)
{
}
static void
expand_BUILTIN_EXPECT (gimple stmt)
expand_BUILTIN_EXPECT (gcall *stmt)
{
/* When guessing was done, the hints should be already stripped away. */
gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());
@ -1930,10 +1930,10 @@ expand_BUILTIN_EXPECT (gimple stmt)
/* Routines to expand each internal function, indexed by function number.
Each routine has the prototype:
expand_<NAME> (gimple stmt)
expand_<NAME> (gcall *stmt)
where STMT is the statement that performs the call. */
static void (*const internal_fn_expanders[]) (gimple) = {
static void (*const internal_fn_expanders[]) (gcall *) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
@ -1943,7 +1943,7 @@ static void (*const internal_fn_expanders[]) (gimple) = {
/* Expand STMT, which is a call to internal function FN. */
void
expand_internal_call (gimple stmt)
expand_internal_call (gcall *stmt)
{
internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}
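With the internal-function expanders above now taking gcall *, the single downcast happens at the caller of expand_internal_call rather than inside each expand_<NAME> routine. A hedged sketch of such a call site, assuming GCC's internal headers; the wrapper itself is illustrative and not part of the patch.

/* Illustrative only, not from the patch.  */
static void
maybe_expand_internal_call (gimple stmt)
{
  if (is_gimple_call (stmt) && gimple_call_internal_p (stmt))
    expand_internal_call (as_a <gcall *> (stmt));
}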


@ -35,7 +35,7 @@ along with GCC; see the file COPYING3. If not see
Each entry must have a corresponding expander of the form:
void expand_NAME (gimple stmt)
void expand_NAME (gimple_call stmt)
where STMT is the statement that performs the call. */


@ -57,6 +57,6 @@ internal_fn_fnspec (enum internal_fn fn)
return internal_fn_fnspec_array[(int) fn];
}
extern void expand_internal_call (gimple);
extern void expand_internal_call (gcall *);
#endif


@ -539,9 +539,9 @@ func_checker::parse_labels (sem_bb *bb)
{
gimple stmt = gsi_stmt (gsi);
if (gimple_code (stmt) == GIMPLE_LABEL)
if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
tree t = gimple_label_label (stmt);
tree t = gimple_label_label (label_stmt);
gcc_assert (TREE_CODE (t) == LABEL_DECL);
m_label_bb_map.put (t, bb->bb->index);
@ -595,7 +595,8 @@ func_checker::compare_bb (sem_bb *bb1, sem_bb *bb2)
switch (gimple_code (s1))
{
case GIMPLE_CALL:
if (!compare_gimple_call (s1, s2))
if (!compare_gimple_call (as_a <gcall *> (s1),
as_a <gcall *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_CALL");
break;
case GIMPLE_ASSIGN:
@ -607,22 +608,26 @@ func_checker::compare_bb (sem_bb *bb1, sem_bb *bb2)
return return_different_stmts (s1, s2, "GIMPLE_COND");
break;
case GIMPLE_SWITCH:
if (!compare_gimple_switch (s1, s2))
if (!compare_gimple_switch (as_a <gswitch *> (s1),
as_a <gswitch *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_SWITCH");
break;
case GIMPLE_DEBUG:
case GIMPLE_EH_DISPATCH:
break;
case GIMPLE_RESX:
if (!compare_gimple_resx (s1, s2))
if (!compare_gimple_resx (as_a <gresx *> (s1),
as_a <gresx *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_RESX");
break;
case GIMPLE_LABEL:
if (!compare_gimple_label (s1, s2))
if (!compare_gimple_label (as_a <glabel *> (s1),
as_a <glabel *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_LABEL");
break;
case GIMPLE_RETURN:
if (!compare_gimple_return (s1, s2))
if (!compare_gimple_return (as_a <greturn *> (s1),
as_a <greturn *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_RETURN");
break;
case GIMPLE_GOTO:
@ -630,7 +635,8 @@ func_checker::compare_bb (sem_bb *bb1, sem_bb *bb2)
return return_different_stmts (s1, s2, "GIMPLE_GOTO");
break;
case GIMPLE_ASM:
if (!compare_gimple_asm (s1, s2))
if (!compare_gimple_asm (as_a <gasm *> (s1),
as_a <gasm *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_ASM");
break;
case GIMPLE_PREDICT:
@ -651,7 +657,7 @@ func_checker::compare_bb (sem_bb *bb1, sem_bb *bb2)
call statements are semantically equivalent. */
bool
func_checker::compare_gimple_call (gimple s1, gimple s2)
func_checker::compare_gimple_call (gcall *s1, gcall *s2)
{
unsigned i;
tree t1, t2;
@ -781,11 +787,11 @@ func_checker::compare_tree_ssa_label (tree t1, tree t2)
return compare_operand (t1, t2);
}
/* Verifies for given GIMPLEs S1 and S2 that
/* Verifies for given GIMPLE_LABEL stmts S1 and S2 that
label statements are semantically equivalent. */
bool
func_checker::compare_gimple_label (gimple g1, gimple g2)
func_checker::compare_gimple_label (const glabel *g1, const glabel *g2)
{
if (m_ignore_labels)
return true;
@ -800,11 +806,11 @@ func_checker::compare_gimple_label (gimple g1, gimple g2)
return true;
}
/* Verifies for given GIMPLEs S1 and S2 that
/* Verifies for given GIMPLE_SWITCH stmts S1 and S2 that
switch statements are semantically equivalent. */
bool
func_checker::compare_gimple_switch (gimple g1, gimple g2)
func_checker::compare_gimple_switch (const gswitch *g1, const gswitch *g2)
{
unsigned lsize1, lsize2, i;
@ -854,11 +860,11 @@ func_checker::compare_gimple_switch (gimple g1, gimple g2)
return true;
}
/* Verifies for given GIMPLEs S1 and S2 that
/* Verifies for given GIMPLE_RETURN stmts S1 and S2 that
return statements are semantically equivalent. */
bool
func_checker::compare_gimple_return (gimple g1, gimple g2)
func_checker::compare_gimple_return (const greturn *g1, const greturn *g2)
{
tree t1, t2;
@ -889,11 +895,11 @@ func_checker::compare_gimple_goto (gimple g1, gimple g2)
return compare_operand (dest1, dest2);
}
/* Verifies for given GIMPLEs S1 and S2 that
/* Verifies for given GIMPLE_RESX stmts S1 and S2 that
resx statements are semantically equivalent. */
bool
func_checker::compare_gimple_resx (gimple g1, gimple g2)
func_checker::compare_gimple_resx (const gresx *g1, const gresx *g2)
{
return gimple_resx_region (g1) == gimple_resx_region (g2);
}
@ -903,7 +909,7 @@ func_checker::compare_gimple_resx (gimple g1, gimple g2)
'__asm__ __volatile__ ("", "", "", "memory")'. */
bool
func_checker::compare_gimple_asm (gimple g1, gimple g2)
func_checker::compare_gimple_asm (const gasm *g1, const gasm *g2)
{
if (gimple_asm_volatile_p (g1) != gimple_asm_volatile_p (g2))
return false;
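compare_bb above dispatches on gimple_code and then hands each comparator an as_a <> cast, since the statement kind is already established at that point. The same convention is shown here as a stand-alone hedged example; the predicate below is illustrative and not part of the patch.

/* Illustrative only; assumes GCC's internal headers.  */
static bool
stmt_is_volatile_asm_p (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASM:
      /* The GIMPLE_ASM accessors now require a gasm *.  */
      return gimple_asm_volatile_p (as_a <gasm *> (stmt));
    default:
      return false;
    }
}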


@ -161,7 +161,7 @@ public:
/* Verifies for given GIMPLEs S1 and S2 that
call statements are semantically equivalent. */
bool compare_gimple_call (gimple s1, gimple s2);
bool compare_gimple_call (gcall *s1, gcall *s2);
/* Verifies for given GIMPLEs S1 and S2 that
assignment statements are semantically equivalent. */
@ -171,30 +171,31 @@ public:
condition statements are semantically equivalent. */
bool compare_gimple_cond (gimple s1, gimple s2);
/* Verifies for given GIMPLEs S1 and S2 that
/* Verifies for given GIMPLE_LABEL stmts S1 and S2 that
label statements are semantically equivalent. */
bool compare_gimple_label (gimple s1, gimple s2);
bool compare_gimple_label (const glabel *s1, const glabel *s2);
/* Verifies for given GIMPLEs S1 and S2 that
/* Verifies for given GIMPLE_SWITCH stmts S1 and S2 that
switch statements are semantically equivalent. */
bool compare_gimple_switch (gimple s1, gimple s2);
bool compare_gimple_switch (const gswitch *s1, const gswitch *s2);
/* Verifies for given GIMPLEs S1 and S2 that
/* Verifies for given GIMPLE_RETURN stmts S1 and S2 that
return statements are semantically equivalent. */
bool compare_gimple_return (gimple s1, gimple s2);
bool compare_gimple_return (const greturn *s1, const greturn *s2);
/* Verifies for given GIMPLEs S1 and S2 that
goto statements are semantically equivalent. */
bool compare_gimple_goto (gimple s1, gimple s2);
/* Verifies for given GIMPLEs S1 and S2 that
/* Verifies for given GIMPLE_RESX stmts S1 and S2 that
resx statements are semantically equivalent. */
bool compare_gimple_resx (gimple s1, gimple s2);
bool compare_gimple_resx (const gresx *s1, const gresx *s2);
/* Verifies for given GIMPLEs S1 and S2 that ASM statements are equivalent.
/* Verifies for given GIMPLE_ASM stmts S1 and S2 that ASM statements
are equivalent.
For the beginning, the pass only supports equality for
'__asm__ __volatile__ ("", "", "", "memory")'. */
bool compare_gimple_asm (gimple s1, gimple s2);
bool compare_gimple_asm (const gasm *s1, const gasm *s2);
/* Verification function for declaration trees T1 and T2. */
bool compare_decl (tree t1, tree t2);


@ -883,8 +883,8 @@ sem_function::parse_tree_args (void)
bool
sem_function::compare_phi_node (basic_block bb1, basic_block bb2)
{
gimple_stmt_iterator si1, si2;
gimple phi1, phi2;
gphi_iterator si1, si2;
gphi *phi1, *phi2;
unsigned size1, size2, i;
tree t1, t2;
edge e1, e2;
@ -905,8 +905,8 @@ sem_function::compare_phi_node (basic_block bb1, basic_block bb2)
if (gsi_end_p (si1) || gsi_end_p (si2))
return return_false();
phi1 = gsi_stmt (si1);
phi2 = gsi_stmt (si2);
phi1 = si1.phi ();
phi2 = si2.phi ();
tree phi_result1 = gimple_phi_result (phi1);
tree phi_result2 = gimple_phi_result (phi2);


@ -1815,7 +1815,7 @@ set_switch_stmt_execution_predicate (struct ipa_node_params *info,
struct inline_summary *summary,
basic_block bb)
{
gimple last;
gimple lastg;
tree op;
int index;
struct agg_position_info aggpos;
@ -1824,9 +1824,10 @@ set_switch_stmt_execution_predicate (struct ipa_node_params *info,
size_t n;
size_t case_idx;
last = last_stmt (bb);
if (!last || gimple_code (last) != GIMPLE_SWITCH)
lastg = last_stmt (bb);
if (!lastg || gimple_code (lastg) != GIMPLE_SWITCH)
return;
gswitch *last = as_a <gswitch *> (lastg);
op = gimple_switch_index (last);
if (!unmodified_parm_or_parm_agg_item (info, last, op, &index, &aggpos))
return;
@ -2276,7 +2277,7 @@ phi_result_unknown_predicate (struct ipa_node_params *info,
NONCONSTANT_NAMES, if possible. */
static void
predicate_for_phi_result (struct inline_summary *summary, gimple phi,
predicate_for_phi_result (struct inline_summary *summary, gphi *phi,
struct predicate *p,
vec<predicate_t> nonconstant_names)
{
@ -2459,7 +2460,6 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
/* Benefits are scaled by probability of elimination that is in range
<0,2>. */
basic_block bb;
gimple_stmt_iterator bsi;
struct function *my_function = DECL_STRUCT_FUNCTION (node->decl);
int freq;
struct inline_summary *info = inline_summary (node);
@ -2540,7 +2540,8 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
struct predicate phi_predicate;
bool first_phi = true;
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
if (first_phi
&& !phi_result_unknown_predicate (parms_info, info, bb,
@ -2553,14 +2554,15 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
fprintf (dump_file, " ");
print_gimple_stmt (dump_file, gsi_stmt (bsi), 0, 0);
}
predicate_for_phi_result (info, gsi_stmt (bsi), &phi_predicate,
predicate_for_phi_result (info, bsi.phi (), &phi_predicate,
nonconstant_names);
}
}
fix_builtin_expect_stmt = find_foldable_builtin_expect (bb);
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
int this_size = estimate_num_insns (stmt, &eni_size_weights);


@ -710,7 +710,7 @@ param_type_may_change_p (tree function, tree arg, gimple call)
static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
gimple call, struct ipa_jump_func *jfunc,
gcall *call, struct ipa_jump_func *jfunc,
HOST_WIDE_INT offset)
{
struct prop_type_change_info tci;
@ -759,7 +759,7 @@ detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
returned by get_ref_base_and_extent, as is the offset. */
static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
if (!flag_devirtualize)
@ -780,7 +780,7 @@ detect_type_change (tree arg, tree base, tree comp_type, gimple call,
static bool
detect_type_change_ssa (tree arg, tree comp_type,
gimple call, struct ipa_jump_func *jfunc)
gcall *call, struct ipa_jump_func *jfunc)
{
gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
if (!flag_devirtualize
@ -1157,7 +1157,7 @@ static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
struct ipa_node_params *info,
struct ipa_jump_func *jfunc,
gimple call, gimple stmt, tree name,
gcall *call, gimple stmt, tree name,
tree param_type)
{
HOST_WIDE_INT offset, size, max_size;
@ -1299,7 +1299,7 @@ static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
struct ipa_node_params *info,
struct ipa_jump_func *jfunc,
gimple call, gimple phi)
gcall *call, gphi *phi)
{
HOST_WIDE_INT offset;
gimple assign, cond;
@ -1486,7 +1486,8 @@ build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
subsequently stored. */
static void
determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
determine_locally_known_aggregate_parts (gcall *call, tree arg,
tree arg_type,
struct ipa_jump_func *jfunc)
{
struct ipa_known_agg_contents_list *list = NULL;
@ -1682,7 +1683,7 @@ ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
{
struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
struct ipa_edge_args *args = IPA_EDGE_REF (cs);
gimple call = cs->call_stmt;
gcall *call = cs->call_stmt;
int n, arg_num = gimple_call_num_args (call);
bool useful_context = false;
@ -1751,7 +1752,8 @@ ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
call, stmt, arg, param_type);
else if (gimple_code (stmt) == GIMPLE_PHI)
compute_complex_ancestor_jump_func (fbi, info, jfunc,
call, stmt);
call,
as_a <gphi *> (stmt));
}
}
@ -1871,7 +1873,8 @@ ipa_is_ssa_with_stmt_def (tree t)
indirect call graph edge. */
static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
ipa_note_param_call (struct cgraph_node *node, int param_index,
gcall *stmt)
{
struct cgraph_edge *cs;
@ -1942,7 +1945,7 @@ ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
passed by value or reference. */
static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
tree target)
{
struct ipa_node_params *info = fbi->info;
@ -2081,7 +2084,7 @@ ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
static void
ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
gimple call, tree target)
gcall *call, tree target)
{
tree obj = OBJ_TYPE_REF_OBJECT (target);
int index;
@ -2137,7 +2140,7 @@ ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
containing intermediate information about each formal parameter. */
static void
ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
{
tree target = gimple_call_fn (call);
@ -2186,7 +2189,7 @@ static void
ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
{
if (is_gimple_call (stmt))
ipa_analyze_call_uses (fbi, stmt);
ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
@ -3852,13 +3855,13 @@ ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
contain the corresponding call graph edge. */
void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
ipa_parm_adjustment_vec adjustments)
{
struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
vec<tree> vargs;
vec<tree, va_gc> **debug_args = NULL;
gimple new_stmt;
gcall *new_stmt;
gimple_stmt_iterator gsi, prev_gsi;
tree callee_decl;
int i, len;
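A sketch of the recurring idiom in the ipa-prop.c hunks above, not part of the patch: routines that used to accept a plain gimple now take a gcall *, and a caller that still holds a generic statement does the checked conversion once at the boundary. Assumes GCC's gimple.h and is-a.h; called_fndecl_or_null is a hypothetical name.

/* Sketch only, not from the patch.  Returns the called declaration if
   STMT is a GIMPLE_CALL, NULL_TREE otherwise.  */
static tree
called_fndecl_or_null (gimple stmt)
{
  /* dyn_cast yields a non-NULL gcall * only for call statements, so
     everything downstream works on the concrete type without
     re-checking the statement code.  */
  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    return gimple_call_fndecl (call_stmt);
  return NULL_TREE;
}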


@ -637,7 +637,7 @@ typedef vec<ipa_parm_adjustment> ipa_parm_adjustment_vec;
vec<tree> ipa_get_vector_of_formal_parms (tree fndecl);
vec<tree> ipa_get_vector_of_formal_parm_types (tree fntype);
void ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec);
void ipa_modify_call_arguments (struct cgraph_edge *, gimple,
void ipa_modify_call_arguments (struct cgraph_edge *, gcall *,
ipa_parm_adjustment_vec);
ipa_parm_adjustment_vec ipa_combine_adjustments (ipa_parm_adjustment_vec,
ipa_parm_adjustment_vec);


@ -518,7 +518,7 @@ special_builtin_state (enum pure_const_state_e *state, bool *looping,
the entire call expression. */
static void
check_call (funct_state local, gimple call, bool ipa)
check_call (funct_state local, gcall *call, bool ipa)
{
int flags = gimple_call_flags (call);
tree callee_t = gimple_call_fndecl (call);
@ -745,10 +745,10 @@ check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
switch (gimple_code (stmt))
{
case GIMPLE_CALL:
check_call (local, stmt, ipa);
check_call (local, as_a <gcall *> (stmt), ipa);
break;
case GIMPLE_LABEL:
if (DECL_NONLOCAL (gimple_label_label (stmt)))
if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
/* Target of long jump. */
{
if (dump_file)
@ -757,7 +757,7 @@ check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
}
break;
case GIMPLE_ASM:
if (gimple_asm_clobbers_memory_p (stmt))
if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
{
if (dump_file)
fprintf (dump_file, " memory asm clobber is not const/pure\n");
@ -765,7 +765,7 @@ check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
local->pure_const_state = IPA_NEITHER;
local->can_free = true;
}
if (gimple_asm_volatile_p (stmt))
if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
{
if (dump_file)
fprintf (dump_file, " volatile is not const/pure\n");
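A note on the two conversion helpers used above, with a minimal sketch (stmt_is_volatile_asm is hypothetical, assuming GCC's gimple.h): as_a <gasm *> is for spots where the statement kind is already known, such as inside a case GIMPLE_ASM arm, and only asserts in checking builds; dyn_cast <gasm *> is for spots where the kind is still unknown and may return NULL.

/* Hypothetical wrapper, for illustration only.  */
static bool
stmt_is_volatile_asm (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASM)
    return false;
  /* The kind is established above, so the assert-only cast applies.  */
  return gimple_asm_volatile_p (as_a <gasm *> (stmt));
}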


@ -248,8 +248,6 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
while (!worklist.is_empty ())
{
gimple_stmt_iterator bsi;
bb = worklist.pop ();
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
@ -259,7 +257,8 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
e->src->index));
worklist.safe_push (e->src);
}
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
if (is_gimple_debug (stmt))
@ -271,15 +270,16 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
ok = false;
goto done;
}
if (gimple_code (stmt) == GIMPLE_LABEL
&& test_nonssa_use (stmt, gimple_label_label (stmt),
NULL_TREE, non_ssa_vars))
{
ok = false;
goto done;
}
if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
if (test_nonssa_use (stmt, gimple_label_label (label_stmt),
NULL_TREE, non_ssa_vars))
{
ok = false;
goto done;
}
}
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
if (walk_stmt_load_store_addr_ops
(gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
@ -293,10 +293,11 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
{
if (e->dest != return_bb)
continue;
for (bsi = gsi_start_phis (return_bb); !gsi_end_p (bsi);
for (gphi_iterator bsi = gsi_start_phis (return_bb);
!gsi_end_p (bsi);
gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
gphi *stmt = bsi.phi ();
tree op = gimple_phi_arg_def (stmt, e->dest_idx);
if (virtual_operand_p (gimple_phi_result (stmt)))
@ -319,15 +320,17 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
{
gimple_stmt_iterator bsi;
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
if (gimple_code (gsi_stmt (bsi)) == GIMPLE_LABEL
&& test_nonssa_use (gsi_stmt (bsi),
gimple_label_label (gsi_stmt (bsi)),
NULL_TREE, non_ssa_vars))
if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (bsi)))
{
ok = false;
goto done;
if (test_nonssa_use (label_stmt,
gimple_label_label (label_stmt),
NULL_TREE, non_ssa_vars))
{
ok = false;
goto done;
}
}
else if (gimple_code (gsi_stmt (bsi)) != GIMPLE_LABEL)
else
break;
}
@ -369,9 +372,10 @@ check_forbidden_calls (gimple stmt)
basic_block use_bb, forbidden_bb;
enum tree_code code;
edge true_edge, false_edge;
gimple use_stmt = USE_STMT (use_p);
gcond *use_stmt;
if (gimple_code (use_stmt) != GIMPLE_COND)
use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
if (!use_stmt)
continue;
/* Assuming canonical form for GIMPLE_COND here, with constant
@ -443,7 +447,7 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
unsigned int call_overhead;
edge e;
edge_iterator ei;
gimple_stmt_iterator bsi;
gphi_iterator bsi;
unsigned int i;
int incoming_freq = 0;
tree retval;
@ -501,7 +505,7 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
incoming from header are the same. */
for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
gphi *stmt = bsi.phi ();
tree val = NULL;
if (virtual_operand_p (gimple_phi_result (stmt)))
@ -673,15 +677,15 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
for the return value. If there are other PHIs, give up. */
if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
{
gimple_stmt_iterator psi;
gphi_iterator psi;
for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
if (!virtual_operand_p (gimple_phi_result (gsi_stmt (psi)))
if (!virtual_operand_p (gimple_phi_result (psi.phi ()))
&& !(retval
&& current->split_part_set_retval
&& TREE_CODE (retval) == SSA_NAME
&& !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
&& SSA_NAME_DEF_STMT (retval) == gsi_stmt (psi)))
&& SSA_NAME_DEF_STMT (retval) == psi.phi ()))
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file,
@ -762,10 +766,10 @@ find_return_bb (void)
|| is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
&& retval == gimple_assign_lhs (stmt))
;
else if (gimple_code (stmt) == GIMPLE_RETURN)
else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
found_return = true;
retval = gimple_return_retval (stmt);
retval = gimple_return_retval (return_stmt);
}
else
break;
@ -783,8 +787,8 @@ find_retval (basic_block return_bb)
{
gimple_stmt_iterator bsi;
for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
return gimple_return_retval (gsi_stmt (bsi));
if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
return gimple_return_retval (return_stmt);
else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
&& !gimple_clobber_p (gsi_stmt (bsi)))
return gimple_assign_rhs1 (gsi_stmt (bsi));
@ -861,12 +865,12 @@ visit_bb (basic_block bb, basic_block return_bb,
bitmap set_ssa_names, bitmap used_ssa_names,
bitmap non_ssa_vars)
{
gimple_stmt_iterator bsi;
edge e;
edge_iterator ei;
bool can_split = true;
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
tree op;
@ -935,9 +939,10 @@ visit_bb (basic_block bb, basic_block return_bb,
mark_nonssa_use,
mark_nonssa_use);
}
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
gphi *stmt = bsi.phi ();
unsigned int i;
if (virtual_operand_p (gimple_phi_result (stmt)))
@ -959,9 +964,11 @@ visit_bb (basic_block bb, basic_block return_bb,
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->dest == return_bb)
{
for (bsi = gsi_start_phis (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gphi_iterator bsi = gsi_start_phis (return_bb);
!gsi_end_p (bsi);
gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
gphi *stmt = bsi.phi ();
tree op = gimple_phi_arg_def (stmt, e->dest_idx);
if (virtual_operand_p (gimple_phi_result (stmt)))
@ -1198,8 +1205,7 @@ split_function (struct split_point *split_point)
cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
basic_block return_bb = find_return_bb ();
basic_block call_bb;
gimple_stmt_iterator gsi;
gimple call;
gcall *call;
edge e;
edge_iterator ei;
tree retval = NULL, real_retval = NULL, retbnd = NULL;
@ -1302,9 +1308,10 @@ split_function (struct split_point *split_point)
if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
{
bool phi_p = false;
for (gsi = gsi_start_phis (return_bb); !gsi_end_p (gsi);)
for (gphi_iterator gsi = gsi_start_phis (return_bb);
!gsi_end_p (gsi);)
{
gimple stmt = gsi_stmt (gsi);
gphi *stmt = gsi.phi ();
if (!virtual_operand_p (gimple_phi_result (stmt)))
{
gsi_next (&gsi);
@ -1323,7 +1330,9 @@ split_function (struct split_point *split_point)
entry of the SESE region as the vuse of the call and the reaching
vdef of the exit of the SESE region as the vdef of the call. */
if (!phi_p)
for (gsi = gsi_start_bb (return_bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
!gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
if (gimple_vuse (stmt))
@ -1372,7 +1381,7 @@ split_function (struct split_point *split_point)
/* Create the basic block we place call into. It is the entry basic block
split after last label. */
call_bb = split_point->entry_bb;
for (gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
for (gimple_stmt_iterator gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
{
last_stmt = gsi_stmt (gsi);
@ -1384,7 +1393,7 @@ split_function (struct split_point *split_point)
remove_edge (e);
/* Produce the call statement. */
gsi = gsi_last_bb (call_bb);
gimple_stmt_iterator gsi = gsi_last_bb (call_bb);
FOR_EACH_VEC_ELT (args_to_pass, i, arg)
if (!is_gimple_val (arg))
{
@ -1504,7 +1513,7 @@ split_function (struct split_point *split_point)
if (real_retval && split_point->split_part_set_retval)
{
gimple_stmt_iterator psi;
gphi_iterator psi;
/* See if we need new SSA_NAME for the result.
When DECL_BY_REFERENCE is true, retval is actually pointer to
@ -1517,13 +1526,13 @@ split_function (struct split_point *split_point)
/* See if there is PHI defining return value. */
for (psi = gsi_start_phis (return_bb);
!gsi_end_p (psi); gsi_next (&psi))
if (!virtual_operand_p (gimple_phi_result (gsi_stmt (psi))))
if (!virtual_operand_p (gimple_phi_result (psi.phi ())))
break;
/* When there is PHI, just update its value. */
if (TREE_CODE (retval) == SSA_NAME
&& !gsi_end_p (psi))
add_phi_arg (gsi_stmt (psi), retval, e, UNKNOWN_LOCATION);
add_phi_arg (psi.phi (), retval, e, UNKNOWN_LOCATION);
/* Otherwise update the return BB itself.
find_return_bb allows at most one assignment to return value,
so update first statement. */
@ -1532,9 +1541,10 @@ split_function (struct split_point *split_point)
gimple_stmt_iterator bsi;
for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
gsi_next (&bsi))
if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
if (greturn *return_stmt
= dyn_cast <greturn *> (gsi_stmt (bsi)))
{
gimple_return_set_retval (gsi_stmt (bsi), retval);
gimple_return_set_retval (return_stmt, retval);
break;
}
else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
@ -1596,7 +1606,7 @@ split_function (struct split_point *split_point)
*/
else
{
gimple ret;
greturn *ret;
if (split_point->split_part_set_retval
&& !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
{
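The PHI walks converted in this file all take the same shape once gsi_start_phis returns a gphi_iterator. A minimal sketch under that assumption; count_nonvirtual_phis is a hypothetical helper, not part of the patch.

static unsigned
count_nonvirtual_phis (basic_block bb)
{
  unsigned n = 0;
  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      /* gsi.phi () already returns a gphi *, so no cast is needed.  */
      gphi *phi = gsi.phi ();
      if (!virtual_operand_p (gimple_phi_result (phi)))
        n++;
    }
  return n;
}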


@ -1,3 +1,10 @@
2014-11-19 David Malcolm <dmalcolm@redhat.com>
Merger of git branch "gimple-classes-v2-option-3".
* ChangeLog.gimple-classes: New.
* java-gimplify.c (java_gimplify_block): Strengthen local "outer"
from gimple to gbind *.
2014-11-11 David Malcolm <dmalcolm@redhat.com>
* gcc/ChangeLog.jit: New.


@ -0,0 +1,20 @@
2014-10-27 David Malcolm <dmalcolm@redhat.com>
Patch autogenerated by rename_gimple_subclasses.py from
https://github.com/davidmalcolm/gcc-refactoring-scripts
revision 7d754b63ff2bf47226a67b2c0af5d74b54d4709f
* java-gimplify.c (java_gimplify_block): Rename gimple subclass types.
2014-10-24 David Malcolm <dmalcolm@redhat.com>
Introduce gimple_bind and use it for accessors.
* java-gimplify.c (java_gimplify_block): Update local to be a
gimple_bind rather than just a gimple.
Copyright (C) 2014 Free Software Foundation, Inc.
Copying and distribution of this file, with or without modification,
are permitted in any medium without royalty provided the copyright
notice and this notice are preserved.


@ -137,7 +137,7 @@ java_gimplify_block (tree java_block)
{
tree decls = BLOCK_VARS (java_block);
tree body = BLOCK_EXPR_BODY (java_block);
gimple outer = gimple_current_bind_expr ();
gbind *outer = gimple_current_bind_expr ();
tree block;
/* Don't bother with empty blocks. */


@ -799,7 +799,7 @@ fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple *stmts,
{
if (gimple_stmt_max_uid (fn) < cedge->lto_stmt_uid)
fatal_error ("Cgraph edge statement index out of range");
cedge->call_stmt = stmts[cedge->lto_stmt_uid - 1];
cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
if (!cedge->call_stmt)
fatal_error ("Cgraph edge statement index not found");
}
@ -807,7 +807,7 @@ fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple *stmts,
{
if (gimple_stmt_max_uid (fn) < cedge->lto_stmt_uid)
fatal_error ("Cgraph edge statement index out of range");
cedge->call_stmt = stmts[cedge->lto_stmt_uid - 1];
cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
if (!cedge->call_stmt)
fatal_error ("Cgraph edge statement index not found");
}


@ -2023,16 +2023,17 @@ output_function (struct cgraph_node *node)
set_gimple_stmt_max_uid (cfun, 0);
FOR_ALL_BB_FN (bb, cfun)
{
gimple_stmt_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
gphi *stmt = gsi.phi ();
/* Virtual PHIs are not going to be streamed. */
if (!virtual_operand_p (gimple_phi_result (stmt)))
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
@ -2042,10 +2043,10 @@ output_function (struct cgraph_node *node)
virtual phis now. */
FOR_ALL_BB_FN (bb, cfun)
{
gimple_stmt_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
gphi *stmt = gsi.phi ();
if (virtual_operand_p (gimple_phi_result (stmt)))
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}

(File diff suppressed because it is too large.)


@ -1065,7 +1065,7 @@ get_base_value (tree t)
Otherwise return false and set LOOP_INVAIANT to NULL. */
static bool
is_comparison_with_loop_invariant_p (gimple stmt, struct loop *loop,
is_comparison_with_loop_invariant_p (gcond *stmt, struct loop *loop,
tree *loop_invariant,
enum tree_code *compare_code,
tree *loop_step,
@ -1230,7 +1230,8 @@ predict_iv_comparison (struct loop *loop, basic_block bb,
stmt = last_stmt (bb);
if (!stmt || gimple_code (stmt) != GIMPLE_COND)
return;
if (!is_comparison_with_loop_invariant_p (stmt, loop, &compare_var,
if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt),
loop, &compare_var,
&compare_code,
&compare_step_var,
&compare_base))
@ -1403,12 +1404,19 @@ predict_extra_loop_exits (edge exit_edge)
{
unsigned i;
bool check_value_one;
gimple phi_stmt;
gimple lhs_def_stmt;
gphi *phi_stmt;
tree cmp_rhs, cmp_lhs;
gimple cmp_stmt = last_stmt (exit_edge->src);
gimple last;
gcond *cmp_stmt;
if (!cmp_stmt || gimple_code (cmp_stmt) != GIMPLE_COND)
last = last_stmt (exit_edge->src);
if (!last)
return;
cmp_stmt = dyn_cast <gcond *> (last);
if (!cmp_stmt)
return;
cmp_rhs = gimple_cond_rhs (cmp_stmt);
cmp_lhs = gimple_cond_lhs (cmp_stmt);
if (!TREE_CONSTANT (cmp_rhs)
@ -1424,8 +1432,12 @@ predict_extra_loop_exits (edge exit_edge)
^ (gimple_cond_code (cmp_stmt) == EQ_EXPR))
^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0));
phi_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
if (!phi_stmt || gimple_code (phi_stmt) != GIMPLE_PHI)
lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
if (!lhs_def_stmt)
return;
phi_stmt = dyn_cast <gphi *> (lhs_def_stmt);
if (!phi_stmt)
return;
for (i = 0; i < gimple_phi_num_args (phi_stmt); i++)
@ -1471,7 +1483,7 @@ predict_loops (void)
tree loop_bound_step = NULL;
tree loop_bound_var = NULL;
tree loop_iv_base = NULL;
gimple stmt = NULL;
gcond *stmt = NULL;
exits = get_loop_exit_edges (loop);
n_exits = exits.length ();
@ -1538,12 +1550,12 @@ predict_loops (void)
if (nb_iter->stmt
&& gimple_code (nb_iter->stmt) == GIMPLE_COND)
{
stmt = nb_iter->stmt;
stmt = as_a <gcond *> (nb_iter->stmt);
break;
}
if (!stmt && last_stmt (loop->header)
&& gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
stmt = last_stmt (loop->header);
stmt = as_a <gcond *> (last_stmt (loop->header));
if (stmt)
is_comparison_with_loop_invariant_p (stmt, loop,
&loop_bound_var,
@ -2101,10 +2113,10 @@ return_prediction (tree val, enum prediction *prediction)
static void
apply_return_prediction (void)
{
gimple return_stmt = NULL;
greturn *return_stmt = NULL;
tree return_val;
edge e;
gimple phi;
gphi *phi;
int phi_num_args, i;
enum br_predictor pred;
enum prediction direction;
@ -2112,10 +2124,13 @@ apply_return_prediction (void)
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
{
return_stmt = last_stmt (e->src);
if (return_stmt
&& gimple_code (return_stmt) == GIMPLE_RETURN)
break;
gimple last = last_stmt (e->src);
if (last
&& gimple_code (last) == GIMPLE_RETURN)
{
return_stmt = as_a <greturn *> (last);
break;
}
}
if (!e)
return;
@ -2126,7 +2141,7 @@ apply_return_prediction (void)
|| !SSA_NAME_DEF_STMT (return_val)
|| gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
return;
phi = SSA_NAME_DEF_STMT (return_val);
phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val));
phi_num_args = gimple_phi_num_args (phi);
pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
@ -2231,12 +2246,12 @@ tree_estimate_probability_bb (basic_block bb)
gimple_stmt_iterator gi;
for (gi = gsi_start_bb (e->dest); !gsi_end_p (gi); gsi_next (&gi))
{
gimple stmt = gsi_stmt (gi);
glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gi));
tree decl;
if (gimple_code (stmt) != GIMPLE_LABEL)
if (!label_stmt)
break;
decl = gimple_label_label (stmt);
decl = gimple_label_label (label_stmt);
if (DECL_ARTIFICIAL (decl))
continue;
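predict_extra_loop_exits above trades a gimple_code check for an explicit NULL test plus dyn_cast. A condensed sketch of that guard, assuming the same headers; block_ends_in_cond is a hypothetical name, not part of the patch.

static gcond *
block_ends_in_cond (basic_block bb)
{
  gimple last = last_stmt (bb);  /* may be NULL for an empty block  */
  if (!last)
    return NULL;
  /* NULL unless the last statement is a GIMPLE_COND.  */
  return dyn_cast <gcond *> (last);
}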


@ -418,10 +418,11 @@ sanopt_optimize_walker (basic_block bb, struct sanopt_ctx *ctx)
{
/* Handle asm volatile or asm with "memory" clobber
the same as potentionally freeing call. */
if (gimple_code (stmt) == GIMPLE_ASM
gasm *asm_stmt = dyn_cast <gasm *> (stmt);
if (asm_stmt
&& asan_check_optimize
&& (gimple_asm_clobbers_memory_p (stmt)
|| gimple_asm_volatile_p (stmt)))
&& (gimple_asm_clobbers_memory_p (asm_stmt)
|| gimple_asm_volatile_p (asm_stmt)))
info->freeing_call_events++;
gsi_next (&gsi);
continue;


@ -186,18 +186,19 @@ sese_build_liveouts_use (sese region, bitmap liveouts, basic_block bb,
static void
sese_build_liveouts_bb (sese region, bitmap liveouts, basic_block bb)
{
gimple_stmt_iterator bsi;
edge e;
edge_iterator ei;
ssa_op_iter iter;
use_operand_p use_p;
FOR_EACH_EDGE (e, ei, bb->succs)
for (bsi = gsi_start_phis (e->dest); !gsi_end_p (bsi); gsi_next (&bsi))
for (gphi_iterator bsi = gsi_start_phis (e->dest); !gsi_end_p (bsi);
gsi_next (&bsi))
sese_build_liveouts_use (region, liveouts, bb,
PHI_ARG_DEF_FROM_EDGE (gsi_stmt (bsi), e));
PHI_ARG_DEF_FROM_EDGE (bsi.phi (), e));
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
@ -318,7 +319,7 @@ free_sese (sese region)
static void
sese_add_exit_phis_edge (basic_block exit, tree use, edge false_e, edge true_e)
{
gimple phi = create_phi_node (NULL_TREE, exit);
gphi *phi = create_phi_node (NULL_TREE, exit);
create_new_def_for (use, phi, gimple_phi_result_ptr (phi));
add_phi_arg (phi, use, false_e, UNKNOWN_LOCATION);
add_phi_arg (phi, use, true_e, UNKNOWN_LOCATION);
@ -737,7 +738,7 @@ set_ifsese_condition (ifsese if_region, tree condition)
basic_block bb = entry->dest;
gimple last = last_stmt (bb);
gimple_stmt_iterator gsi = gsi_last_bb (bb);
gimple cond_stmt;
gcond *cond_stmt;
gcc_assert (gimple_code (last) == GIMPLE_COND);


@ -194,7 +194,9 @@ struct ssa_op_iter
a real stmt or a PHI node, looking at the USE nodes matching FLAGS. */
#define FOR_EACH_PHI_OR_STMT_USE(USEVAR, STMT, ITER, FLAGS) \
for ((USEVAR) = (gimple_code (STMT) == GIMPLE_PHI \
? op_iter_init_phiuse (&(ITER), STMT, FLAGS) \
? op_iter_init_phiuse (&(ITER), \
as_a <gphi *> (STMT), \
FLAGS) \
: op_iter_init_use (&(ITER), STMT, FLAGS)); \
!op_iter_done (&(ITER)); \
(USEVAR) = op_iter_next_use (&(ITER)))
@ -203,7 +205,9 @@ struct ssa_op_iter
a real stmt or a PHI node, looking at the DEF nodes matching FLAGS. */
#define FOR_EACH_PHI_OR_STMT_DEF(DEFVAR, STMT, ITER, FLAGS) \
for ((DEFVAR) = (gimple_code (STMT) == GIMPLE_PHI \
? op_iter_init_phidef (&(ITER), STMT, FLAGS) \
? op_iter_init_phidef (&(ITER), \
as_a <gphi *> (STMT), \
FLAGS) \
: op_iter_init_def (&(ITER), STMT, FLAGS)); \
!op_iter_done (&(ITER)); \
(DEFVAR) = op_iter_next_def (&(ITER)))
@ -610,7 +614,7 @@ op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
ptr->numops = 1;
break;
case GIMPLE_ASM:
ptr->numops = gimple_asm_noutputs (stmt);
ptr->numops = gimple_asm_noutputs (as_a <gasm *> (stmt));
break;
default:
ptr->numops = 0;
@ -749,7 +753,7 @@ num_ssa_operands (gimple stmt, int flags)
/* If there is a single DEF in the PHI node which matches FLAG, return it.
Otherwise return NULL_DEF_OPERAND_P. */
static inline tree
single_phi_def (gimple stmt, int flags)
single_phi_def (gphi *stmt, int flags)
{
tree def = PHI_RESULT (stmt);
if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
@ -762,7 +766,7 @@ single_phi_def (gimple stmt, int flags)
/* Initialize the iterator PTR for uses matching FLAGS in PHI. FLAGS should
be either SSA_OP_USES or SSA_OP_VIRTUAL_USES. */
static inline use_operand_p
op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
op_iter_init_phiuse (ssa_op_iter *ptr, gphi *phi, int flags)
{
tree phi_def = gimple_phi_result (phi);
int comp;
@ -792,7 +796,7 @@ op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
/* Start an iterator for a PHI definition. */
static inline def_operand_p
op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
op_iter_init_phidef (ssa_op_iter *ptr, gphi *phi, int flags)
{
tree phi_def = PHI_RESULT (phi);
int comp;
@ -881,9 +885,9 @@ link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
/* Only look at virtual or real uses, depending on the type of HEAD. */
flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
if (gimple_code (head_stmt) == GIMPLE_PHI)
if (gphi *phi = dyn_cast <gphi *> (head_stmt))
{
FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
FOR_EACH_PHI_ARG (use_p, phi, op_iter, flag)
if (USE_FROM_PTR (use_p) == use)
last_p = move_use_after_head (use_p, head, last_p);
}
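The macro change above is invisible to callers, since the as_a <gphi *> conversion now happens inside the PHI arm of the macro. A usage sketch, assuming ssa-iterators.h; count_stmt_uses is a hypothetical helper.

static unsigned
count_stmt_uses (gimple stmt)
{
  unsigned n = 0;
  use_operand_p use_p;
  ssa_op_iter iter;
  /* Works for PHIs and ordinary statements alike; the PHI branch of
     the macro performs the checked conversion internally.  */
  FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
    n++;
  return n;
}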


@ -1111,7 +1111,7 @@ reset_out_edges_aux (basic_block bb)
STMT. Record this information in the aux field of the edge. */
static inline void
compute_cases_per_edge (gimple stmt)
compute_cases_per_edge (gswitch *stmt)
{
basic_block bb = gimple_bb (stmt);
reset_out_edges_aux (bb);
@ -1133,7 +1133,7 @@ compute_cases_per_edge (gimple stmt)
Generate the code to test it and jump to the right place. */
void
expand_case (gimple stmt)
expand_case (gswitch *stmt)
{
tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
rtx default_label = NULL_RTX;


@ -767,9 +767,10 @@ diagnose_tm_1 (gimple_stmt_iterator *gsi, bool *handled_ops_p,
case GIMPLE_TRANSACTION:
{
gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
unsigned char inner_flags = DIAG_TM_SAFE;
if (gimple_transaction_subcode (stmt) & GTMA_IS_RELAXED)
if (gimple_transaction_subcode (trans_stmt) & GTMA_IS_RELAXED)
{
if (d->block_flags & DIAG_TM_SAFE)
error_at (gimple_location (stmt),
@ -779,7 +780,7 @@ diagnose_tm_1 (gimple_stmt_iterator *gsi, bool *handled_ops_p,
"relaxed transaction in %<transaction_safe%> function");
inner_flags = DIAG_TM_RELAXED;
}
else if (gimple_transaction_subcode (stmt) & GTMA_IS_OUTER)
else if (gimple_transaction_subcode (trans_stmt) & GTMA_IS_OUTER)
{
if (d->block_flags)
error_at (gimple_location (stmt),
@ -795,7 +796,7 @@ diagnose_tm_1 (gimple_stmt_iterator *gsi, bool *handled_ops_p,
}
*handled_ops_p = true;
if (gimple_transaction_body (stmt))
if (gimple_transaction_body (trans_stmt))
{
struct walk_stmt_info wi_inner;
struct diagnose_tm d_inner;
@ -808,7 +809,7 @@ diagnose_tm_1 (gimple_stmt_iterator *gsi, bool *handled_ops_p,
memset (&wi_inner, 0, sizeof (wi_inner));
wi_inner.info = &d_inner;
walk_gimple_seq (gimple_transaction_body (stmt),
walk_gimple_seq (gimple_transaction_body (trans_stmt),
diagnose_tm_1, diagnose_tm_1_op, &wi_inner);
}
}
@ -930,7 +931,7 @@ typedef struct tm_log_entry
/* Entry block for the transaction this address occurs in. */
basic_block entry_block;
/* Dominating statements the store occurs in. */
gimple_vec stmts;
vec<gimple> stmts;
/* Initially, while we are building the log, we place a nonzero
value here to mean that this address *will* be saved with a
save/restore sequence. Later, when generating the save sequence
@ -1610,7 +1611,8 @@ examine_call_tm (unsigned *state, gimple_stmt_iterator *gsi)
static void
lower_transaction (gimple_stmt_iterator *gsi, struct walk_stmt_info *wi)
{
gimple g, stmt = gsi_stmt (*gsi);
gimple g;
gtransaction *stmt = as_a <gtransaction *> (gsi_stmt (*gsi));
unsigned int *outer_state = (unsigned int *) wi->info;
unsigned int this_state = 0;
struct walk_stmt_info this_wi;
@ -1806,6 +1808,22 @@ make_pass_lower_tm (gcc::context *ctxt)
struct tm_region
{
public:
/* The field "transaction_stmt" is initially a gtransaction *,
but eventually gets lowered to a gcall *(to BUILT_IN_TM_START).
Helper method to get it as a gtransaction *, with code-checking
in a checked-build. */
gtransaction *
get_transaction_stmt () const
{
return as_a <gtransaction *> (transaction_stmt);
}
public:
/* Link to the next unnested transaction. */
struct tm_region *next;
@ -1817,7 +1835,8 @@ struct tm_region
/* The GIMPLE_TRANSACTION statement beginning this transaction.
After TM_MARK, this gets replaced by a call to
BUILT_IN_TM_START. */
BUILT_IN_TM_START.
Hence this will be either a gtransaction *or a gcall *. */
gimple transaction_stmt;
/* After TM_MARK expands the GIMPLE_TRANSACTION into a call to
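A sketch of how the new accessor is meant to be used (region_subcode is a hypothetical name, not part of the patch): readers that still expect the GIMPLE_TRANSACTION form go through get_transaction_stmt () instead of casting the raw field themselves.

/* Sketch: callers that need the subcode fetch the statement through
   the checked accessor, which asserts in checking builds.  */
static unsigned int
region_subcode (struct tm_region *region)
{
  gtransaction *trans_stmt = region->get_transaction_stmt ();
  return gimple_transaction_subcode (trans_stmt);
}
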
@ -1860,7 +1879,8 @@ static bitmap_obstack tm_obstack;
GIMPLE_TRANSACTION statement in a tree of tm_region elements. */
static struct tm_region *
tm_region_init_0 (struct tm_region *outer, basic_block bb, gimple stmt)
tm_region_init_0 (struct tm_region *outer, basic_block bb,
gtransaction *stmt)
{
struct tm_region *region;
@ -1975,8 +1995,9 @@ tm_region_init (struct tm_region *region)
/* Check for the last statement in the block beginning a new region. */
g = last_stmt (bb);
old_region = region;
if (g && gimple_code (g) == GIMPLE_TRANSACTION)
region = tm_region_init_0 (region, bb, g);
if (g)
if (gtransaction *trans_stmt = dyn_cast <gtransaction *> (g))
region = tm_region_init_0 (region, bb, trans_stmt);
/* Process subsequent blocks. */
FOR_EACH_EDGE (e, ei, bb->succs)
@ -2085,8 +2106,9 @@ transaction_subcode_ior (struct tm_region *region, unsigned flags)
{
if (region && region->transaction_stmt)
{
flags |= gimple_transaction_subcode (region->transaction_stmt);
gimple_transaction_set_subcode (region->transaction_stmt, flags);
gtransaction *transaction_stmt = region->get_transaction_stmt ();
flags |= gimple_transaction_subcode (transaction_stmt);
gimple_transaction_set_subcode (transaction_stmt, flags);
}
}
@ -2096,12 +2118,12 @@ transaction_subcode_ior (struct tm_region *region, unsigned flags)
LOC is the location to use for the new statement(s). */
static gimple
static gcall *
build_tm_load (location_t loc, tree lhs, tree rhs, gimple_stmt_iterator *gsi)
{
enum built_in_function code = END_BUILTINS;
tree t, type = TREE_TYPE (rhs), decl;
gimple gcall;
gcall *gcall;
if (type == float_type_node)
code = BUILT_IN_TM_LOAD_FLOAT;
@ -2168,12 +2190,12 @@ build_tm_load (location_t loc, tree lhs, tree rhs, gimple_stmt_iterator *gsi)
/* Similarly for storing TYPE in a transactional context. */
static gimple
static gcall *
build_tm_store (location_t loc, tree lhs, tree rhs, gimple_stmt_iterator *gsi)
{
enum built_in_function code = END_BUILTINS;
tree t, fn, type = TREE_TYPE (rhs), simple_type;
gimple gcall;
gcall *gcall;
if (type == float_type_node)
code = BUILT_IN_TM_STORE_FLOAT;
@ -2340,7 +2362,7 @@ static bool
expand_call_tm (struct tm_region *region,
gimple_stmt_iterator *gsi)
{
gimple stmt = gsi_stmt (*gsi);
gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
tree lhs = gimple_call_lhs (stmt);
tree fn_decl;
struct cgraph_node *node;
@ -2427,6 +2449,7 @@ expand_call_tm (struct tm_region *region,
tree tmp = create_tmp_reg (TREE_TYPE (lhs), NULL);
location_t loc = gimple_location (stmt);
edge fallthru_edge = NULL;
gassign *assign_stmt;
/* Remember if the call was going to throw. */
if (stmt_can_throw_internal (stmt))
@ -2445,15 +2468,15 @@ expand_call_tm (struct tm_region *region,
gimple_call_set_lhs (stmt, tmp);
update_stmt (stmt);
stmt = gimple_build_assign (lhs, tmp);
gimple_set_location (stmt, loc);
assign_stmt = gimple_build_assign (lhs, tmp);
gimple_set_location (assign_stmt, loc);
/* We cannot throw in the middle of a BB. If the call was going
to throw, place the instrumentation on the fallthru edge, so
the call remains the last statement in the block. */
if (fallthru_edge)
{
gimple_seq fallthru_seq = gimple_seq_alloc_with_stmt (stmt);
gimple_seq fallthru_seq = gimple_seq_alloc_with_stmt (assign_stmt);
gimple_stmt_iterator fallthru_gsi = gsi_start (fallthru_seq);
expand_assign_tm (region, &fallthru_gsi);
gsi_insert_seq_on_edge (fallthru_edge, fallthru_seq);
@ -2461,7 +2484,7 @@ expand_call_tm (struct tm_region *region,
}
else
{
gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
gsi_insert_after (gsi, assign_stmt, GSI_CONTINUE_LINKING);
expand_assign_tm (region, gsi);
}
@ -2710,7 +2733,7 @@ expand_transaction (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
/* ??? There are plenty of bits here we're not computing. */
{
int subcode = gimple_transaction_subcode (region->transaction_stmt);
int subcode = gimple_transaction_subcode (region->get_transaction_stmt ());
int flags = 0;
if (subcode & GTMA_DOES_GO_IRREVOCABLE)
flags |= PR_DOESGOIRREVOCABLE;
@ -2729,7 +2752,7 @@ expand_transaction (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
if (subcode & GTMA_IS_OUTER)
region->original_transaction_was_outer = true;
tree t = build_int_cst (tm_state_type, flags);
gimple call = gimple_build_call (tm_start, 1, t);
gcall *call = gimple_build_call (tm_start, 1, t);
gimple_call_set_lhs (call, tm_state);
gimple_set_location (call, gimple_location (region->transaction_stmt));
@ -2915,8 +2938,8 @@ generate_tm_state (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
// again as we process blocks.
if (region->exit_blocks)
{
unsigned int subcode
= gimple_transaction_subcode (region->transaction_stmt);
gtransaction *transaction_stmt = region->get_transaction_stmt ();
unsigned int subcode = gimple_transaction_subcode (transaction_stmt);
if (subcode & GTMA_DOES_GO_IRREVOCABLE)
subcode &= (GTMA_DECLARATION_MASK | GTMA_DOES_GO_IRREVOCABLE
@ -2924,7 +2947,7 @@ generate_tm_state (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
| GTMA_HAS_NO_INSTRUMENTATION);
else
subcode &= GTMA_DECLARATION_MASK;
gimple_transaction_set_subcode (region->transaction_stmt, subcode);
gimple_transaction_set_subcode (transaction_stmt, subcode);
}
return NULL;
@ -2940,11 +2963,13 @@ propagate_tm_flags_out (struct tm_region *region)
if (region->outer && region->outer->transaction_stmt)
{
unsigned s = gimple_transaction_subcode (region->transaction_stmt);
unsigned s
= gimple_transaction_subcode (region->get_transaction_stmt ());
s &= (GTMA_HAVE_ABORT | GTMA_HAVE_LOAD | GTMA_HAVE_STORE
| GTMA_MAY_ENTER_IRREVOCABLE);
s |= gimple_transaction_subcode (region->outer->transaction_stmt);
gimple_transaction_set_subcode (region->outer->transaction_stmt, s);
s |= gimple_transaction_subcode (region->outer->get_transaction_stmt ());
gimple_transaction_set_subcode (region->outer->get_transaction_stmt (),
s);
}
propagate_tm_flags_out (region->next);
@ -2979,7 +3004,8 @@ execute_tm_mark (void)
{
if (r->transaction_stmt)
{
unsigned sub = gimple_transaction_subcode (r->transaction_stmt);
unsigned sub
= gimple_transaction_subcode (r->get_transaction_stmt ());
/* If we're sure to go irrevocable, there won't be
anything to expand, since the run-time will go
@ -3097,23 +3123,26 @@ expand_block_edges (struct tm_region *const region, basic_block bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi = next_gsi)
{
gimple stmt = gsi_stmt (gsi);
gcall *call_stmt;
next_gsi = gsi;
gsi_next (&next_gsi);
// ??? Shouldn't we split for any non-pure, non-irrevocable function?
if (gimple_code (stmt) != GIMPLE_CALL
|| (gimple_call_flags (stmt) & ECF_TM_BUILTIN) == 0)
call_stmt = dyn_cast <gcall *> (stmt);
if ((!call_stmt)
|| (gimple_call_flags (call_stmt) & ECF_TM_BUILTIN) == 0)
continue;
if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) == BUILT_IN_TM_ABORT)
if (DECL_FUNCTION_CODE (gimple_call_fndecl (call_stmt))
== BUILT_IN_TM_ABORT)
{
// If we have a ``_transaction_cancel [[outer]]'', there is only
// one abnormal edge: to the transaction marked OUTER.
// All compiler-generated instances of BUILT_IN_TM_ABORT have a
// constant argument, which we can examine here. Users invoking
// TM_ABORT directly get what they deserve.
tree arg = gimple_call_arg (stmt, 0);
tree arg = gimple_call_arg (call_stmt, 0);
if (TREE_CODE (arg) == INTEGER_CST
&& (TREE_INT_CST_LOW (arg) & AR_OUTERABORT) != 0
&& !decl_is_tm_clone (current_function_decl))
@ -3122,7 +3151,7 @@ expand_block_edges (struct tm_region *const region, basic_block bb)
for (struct tm_region *o = region; o; o = o->outer)
if (o->original_transaction_was_outer)
{
split_bb_make_tm_edge (stmt, o->restart_block,
split_bb_make_tm_edge (call_stmt, o->restart_block,
gsi, &next_gsi);
break;
}
@ -3135,7 +3164,8 @@ expand_block_edges (struct tm_region *const region, basic_block bb)
// Non-outer, TM aborts have an abnormal edge to the inner-most
// transaction, the one being aborted;
split_bb_make_tm_edge (stmt, region->restart_block, gsi, &next_gsi);
split_bb_make_tm_edge (call_stmt, region->restart_block, gsi,
&next_gsi);
}
// All TM builtins have an abnormal edge to the outer-most transaction.
@ -3153,14 +3183,14 @@ expand_block_edges (struct tm_region *const region, basic_block bb)
for (struct tm_region *o = region; o; o = o->outer)
if (!o->outer)
{
split_bb_make_tm_edge (stmt, o->restart_block, gsi, &next_gsi);
split_bb_make_tm_edge (call_stmt, o->restart_block, gsi, &next_gsi);
break;
}
// Delete any tail-call annotation that may have been added.
// The tail-call pass may have mis-identified the commit as being
// a candidate because we had not yet added this restart edge.
gimple_call_set_tail (stmt, false);
gimple_call_set_tail (call_stmt, false);
}
}
@ -3764,7 +3794,7 @@ dump_tm_memopt_transform (gimple stmt)
static void
tm_memopt_transform_stmt (unsigned int offset,
gimple stmt,
gcall *stmt,
gimple_stmt_iterator *gsi)
{
tree fn = gimple_call_fn (stmt);
@ -3800,28 +3830,30 @@ tm_memopt_transform_blocks (vec<basic_block> blocks)
if (is_tm_simple_load (stmt))
{
gcall *call_stmt = as_a <gcall *> (stmt);
loc = tm_memopt_value_number (stmt, NO_INSERT);
if (store_avail && bitmap_bit_p (store_avail, loc))
tm_memopt_transform_stmt (TRANSFORM_RAW, stmt, &gsi);
tm_memopt_transform_stmt (TRANSFORM_RAW, call_stmt, &gsi);
else if (store_antic && bitmap_bit_p (store_antic, loc))
{
tm_memopt_transform_stmt (TRANSFORM_RFW, stmt, &gsi);
tm_memopt_transform_stmt (TRANSFORM_RFW, call_stmt, &gsi);
bitmap_set_bit (store_avail, loc);
}
else if (read_avail && bitmap_bit_p (read_avail, loc))
tm_memopt_transform_stmt (TRANSFORM_RAR, stmt, &gsi);
tm_memopt_transform_stmt (TRANSFORM_RAR, call_stmt, &gsi);
else
bitmap_set_bit (read_avail, loc);
}
else if (is_tm_simple_store (stmt))
{
gcall *call_stmt = as_a <gcall *> (stmt);
loc = tm_memopt_value_number (stmt, NO_INSERT);
if (store_avail && bitmap_bit_p (store_avail, loc))
tm_memopt_transform_stmt (TRANSFORM_WAW, stmt, &gsi);
tm_memopt_transform_stmt (TRANSFORM_WAW, call_stmt, &gsi);
else
{
if (read_avail && bitmap_bit_p (read_avail, loc))
tm_memopt_transform_stmt (TRANSFORM_WAR, stmt, &gsi);
tm_memopt_transform_stmt (TRANSFORM_WAR, call_stmt, &gsi);
bitmap_set_bit (store_avail, loc);
}
}
@ -4676,7 +4708,8 @@ ipa_tm_diagnose_transaction (struct cgraph_node *node,
struct tm_region *r;
for (r = all_tm_regions; r ; r = r->next)
if (gimple_transaction_subcode (r->transaction_stmt) & GTMA_IS_RELAXED)
if (gimple_transaction_subcode (r->get_transaction_stmt ())
& GTMA_IS_RELAXED)
{
/* Atomic transactions can be nested inside relaxed. */
if (r->inner)
@ -4950,7 +4983,7 @@ ipa_tm_insert_irr_call (struct cgraph_node *node, struct tm_region *region,
basic_block bb)
{
gimple_stmt_iterator gsi;
gimple g;
gcall *g;
transaction_subcode_ior (region, GTMA_MAY_ENTER_IRREVOCABLE);
@ -4973,10 +5006,11 @@ ipa_tm_insert_irr_call (struct cgraph_node *node, struct tm_region *region,
static bool
ipa_tm_insert_gettmclone_call (struct cgraph_node *node,
struct tm_region *region,
gimple_stmt_iterator *gsi, gimple stmt)
gimple_stmt_iterator *gsi, gcall *stmt)
{
tree gettm_fn, ret, old_fn, callfn;
gimple g, g2;
gcall *g;
gassign *g2;
bool safe;
old_fn = gimple_call_fn (stmt);
@ -5072,7 +5106,7 @@ ipa_tm_transform_calls_redirect (struct cgraph_node *node,
gimple_stmt_iterator *gsi,
bool *need_ssa_rename_p)
{
gimple stmt = gsi_stmt (*gsi);
gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
struct cgraph_node *new_node;
struct cgraph_edge *e = node->get_edge (stmt);
tree fndecl = gimple_call_fndecl (stmt);


@ -184,7 +184,7 @@ check_target_format (tree arg)
#define MAX_BASE_INT_BIT_SIZE 32
static bool
check_pow (gimple pow_call)
check_pow (gcall *pow_call)
{
tree base, expn;
enum tree_code bc, ec;
@ -258,7 +258,7 @@ check_pow (gimple pow_call)
Returns true if the function call is a candidate. */
static bool
check_builtin_call (gimple bcall)
check_builtin_call (gcall *bcall)
{
tree arg;
@ -271,7 +271,7 @@ check_builtin_call (gimple bcall)
is a candidate. */
static bool
is_call_dce_candidate (gimple call)
is_call_dce_candidate (gcall *call)
{
tree fn;
enum built_in_function fnc;
@ -342,7 +342,9 @@ gen_one_condition (tree arg, int lbub,
{
tree lbub_real_cst, lbub_cst, float_type;
tree temp, tempn, tempc, tempcn;
gimple stmt1, stmt2, stmt3;
gassign *stmt1;
gassign *stmt2;
gcond *stmt3;
float_type = TREE_TYPE (arg);
lbub_cst = build_int_cst (integer_type_node, lbub);
@ -547,7 +549,7 @@ gen_conditions_for_pow_int_base (tree base, tree expn,
and *NCONDS is the number of logical conditions. */
static void
gen_conditions_for_pow (gimple pow_call, vec<gimple> conds,
gen_conditions_for_pow (gcall *pow_call, vec<gimple> conds,
unsigned *nconds)
{
tree base, expn;
@ -683,10 +685,10 @@ get_no_error_domain (enum built_in_function fnc)
condition are separated by NULL tree in the vector. */
static void
gen_shrink_wrap_conditions (gimple bi_call, vec<gimple> conds,
gen_shrink_wrap_conditions (gcall *bi_call, vec<gimple> conds,
unsigned int *nconds)
{
gimple call;
gcall *call;
tree fn;
enum built_in_function fnc;
@ -724,7 +726,7 @@ gen_shrink_wrap_conditions (gimple bi_call, vec<gimple> conds,
transformation actually happens. */
static bool
shrink_wrap_one_built_in_call (gimple bi_call)
shrink_wrap_one_built_in_call (gcall *bi_call)
{
gimple_stmt_iterator bi_call_bsi;
basic_block bi_call_bb, join_tgt_bb, guard_bb, guard_bb0;
@ -859,7 +861,7 @@ shrink_wrap_one_built_in_call (gimple bi_call)
wrapping transformation. */
static bool
shrink_wrap_conditional_dead_built_in_calls (vec<gimple> calls)
shrink_wrap_conditional_dead_built_in_calls (vec<gcall *> calls)
{
bool changed = false;
unsigned i = 0;
@ -870,7 +872,7 @@ shrink_wrap_conditional_dead_built_in_calls (vec<gimple> calls)
for (; i < n ; i++)
{
gimple bi_call = calls[i];
gcall *bi_call = calls[i];
changed |= shrink_wrap_one_built_in_call (bi_call);
}
@ -919,15 +921,14 @@ pass_call_cdce::execute (function *fun)
basic_block bb;
gimple_stmt_iterator i;
bool something_changed = false;
auto_vec<gimple> cond_dead_built_in_calls;
auto_vec<gcall *> cond_dead_built_in_calls;
FOR_EACH_BB_FN (bb, fun)
{
/* Collect dead call candidates. */
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
gimple stmt = gsi_stmt (i);
if (is_gimple_call (stmt)
&& is_call_dce_candidate (stmt))
gcall *stmt = dyn_cast <gcall *> (gsi_stmt (i));
if (stmt && is_call_dce_candidate (stmt))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
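The pass above now keeps its worklist at the concrete type (auto_vec<gcall *>). A condensed sketch of that collection step, assuming the same GCC headers and the pass's own is_call_dce_candidate predicate; collect_candidate_calls is a hypothetical helper.

static void
collect_candidate_calls (basic_block bb, vec<gcall *> *out)
{
  for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
       gsi_next (&i))
    /* Only call statements can be candidates, so filter at the
       concrete type first.  */
    if (gcall *call = dyn_cast <gcall *> (gsi_stmt (i)))
      if (is_call_dce_candidate (call))
        out->safe_push (call);
}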

(File diff suppressed because it is too large.)


@ -37,11 +37,11 @@ extern void end_recording_case_labels (void);
extern basic_block label_to_block_fn (struct function *, tree);
#define label_to_block(t) (label_to_block_fn (cfun, t))
extern void cleanup_dead_labels (void);
extern void group_case_labels_stmt (gimple);
extern void group_case_labels_stmt (gswitch *);
extern void group_case_labels (void);
extern void replace_uses_by (tree, tree);
extern basic_block single_noncomplex_succ (basic_block bb);
extern void notice_special_calls (gimple);
extern void notice_special_calls (gcall *);
extern void clear_special_calls (void);
extern edge find_taken_edge (basic_block, tree);
extern void gimple_debug_bb (basic_block);


@ -123,7 +123,7 @@ cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
break;
case GIMPLE_SWITCH:
val = gimple_switch_index (stmt);
val = gimple_switch_index (as_a <gswitch *> (stmt));
break;
default:
@ -319,7 +319,7 @@ tree_forwarder_block_p (basic_block bb, bool phi_wanted)
switch (gimple_code (stmt))
{
case GIMPLE_LABEL:
if (DECL_NONLOCAL (gimple_label_label (stmt)))
if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
return false;
if (optimize == 0 && gimple_location (stmt) != locus)
return false;
@ -377,11 +377,11 @@ phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
int n1 = e1->dest_idx;
int n2 = e2->dest_idx;
gimple_stmt_iterator gsi;
gphi_iterator gsi;
for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
tree val1 = gimple_phi_arg_def (phi, n1);
tree val2 = gimple_phi_arg_def (phi, n2);
@ -416,11 +416,11 @@ remove_forwarder_block (basic_block bb)
/* If the destination block consists of a nonlocal label or is a
EH landing pad, do not merge it. */
label = first_stmt (dest);
if (label
&& gimple_code (label) == GIMPLE_LABEL
&& (DECL_NONLOCAL (gimple_label_label (label))
|| EH_LANDING_PAD_NR (gimple_label_label (label)) != 0))
return false;
if (label)
if (glabel *label_stmt = dyn_cast <glabel *> (label))
if (DECL_NONLOCAL (gimple_label_label (label_stmt))
|| EH_LANDING_PAD_NR (gimple_label_label (label_stmt)) != 0)
return false;
/* If there is an abnormal edge to basic block BB, but not into
dest, problems might occur during removal of the phi node at out
@ -478,11 +478,11 @@ remove_forwarder_block (basic_block bb)
{
/* Create arguments for the phi nodes, since the edge was not
here before. */
for (gsi = gsi_start_phis (dest);
!gsi_end_p (gsi);
gsi_next (&gsi))
for (gphi_iterator psi = gsi_start_phis (dest);
!gsi_end_p (psi);
gsi_next (&psi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = psi.phi ();
source_location l = gimple_phi_arg_location_from_edge (phi, succ);
tree def = gimple_phi_arg_def (phi, succ->dest_idx);
add_phi_arg (phi, unshare_expr (def), s, l);
@ -502,7 +502,7 @@ remove_forwarder_block (basic_block bb)
label = gsi_stmt (gsi);
if (is_gimple_debug (label))
break;
decl = gimple_label_label (label);
decl = gimple_label_label (as_a <glabel *> (label));
if (EH_LANDING_PAD_NR (decl) != 0
|| DECL_NONLOCAL (decl)
|| FORCED_LABEL (decl)
@ -828,10 +828,10 @@ remove_forwarder_block_with_phi (basic_block bb)
/* If the destination block consists of a nonlocal label, do not
merge it. */
label = first_stmt (dest);
if (label
&& gimple_code (label) == GIMPLE_LABEL
&& DECL_NONLOCAL (gimple_label_label (label)))
return false;
if (label)
if (glabel *label_stmt = dyn_cast <glabel *> (label))
if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
return false;
/* Record BB's single pred in case we need to update the father
loop's latch information later. */
@ -843,7 +843,7 @@ remove_forwarder_block_with_phi (basic_block bb)
while (EDGE_COUNT (bb->preds) > 0)
{
edge e = EDGE_PRED (bb, 0), s;
gimple_stmt_iterator gsi;
gphi_iterator gsi;
s = find_edge (e->src, dest);
if (s)
@ -875,7 +875,7 @@ remove_forwarder_block_with_phi (basic_block bb)
!gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
tree def = gimple_phi_arg_def (phi, succ->dest_idx);
source_location locus = gimple_phi_arg_location_from_edge (phi, succ);
@ -1023,7 +1023,7 @@ pass_merge_phi::execute (function *fun)
}
else
{
gimple_stmt_iterator gsi;
gphi_iterator gsi;
unsigned int dest_idx = single_succ_edge (bb)->dest_idx;
/* BB dominates DEST. There may be many users of the PHI
@ -1034,7 +1034,7 @@ pass_merge_phi::execute (function *fun)
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
tree result = gimple_phi_result (phi);
use_operand_p imm_use;
gimple use_stmt;
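The nonlocal-label guards rewritten above share one shape: fetch the first statement, convert with dyn_cast <glabel *>, and only then look at the label decl. A minimal sketch; dest_starts_with_nonlocal_label_p is a hypothetical name, not part of the patch.

static bool
dest_starts_with_nonlocal_label_p (basic_block dest)
{
  gimple first = first_stmt (dest);
  if (!first)
    return false;
  /* Only a GIMPLE_LABEL can carry DECL_NONLOCAL.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (first))
    return DECL_NONLOCAL (gimple_label_label (label_stmt));
  return false;
}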


@ -691,18 +691,15 @@ chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
void *res ATTRIBUTE_UNUSED)
{
tree ptr = *slot;
gimple bounds_phi;
gimple ptr_phi;
gphi *bounds_phi;
gphi *ptr_phi;
unsigned i;
gcc_assert (TREE_CODE (bounds) == SSA_NAME);
gcc_assert (TREE_CODE (ptr) == SSA_NAME);
bounds_phi = SSA_NAME_DEF_STMT (bounds);
ptr_phi = SSA_NAME_DEF_STMT (ptr);
gcc_assert (bounds_phi && gimple_code (bounds_phi) == GIMPLE_PHI);
gcc_assert (ptr_phi && gimple_code (ptr_phi) == GIMPLE_PHI);
bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
{
@ -1198,7 +1195,7 @@ chkp_get_registered_bounds (tree ptr)
static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
gimple ret = gsi_stmt (*gsi);
greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
tree retval = gimple_return_retval (ret);
tree ret_decl = DECL_RESULT (cfun->decl);
tree bounds;
@ -1637,7 +1634,7 @@ chkp_instrument_normal_builtin (tree fndecl)
static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
gimple call = gsi_stmt (*gsi);
gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
unsigned arg_no = 0;
tree fndecl = gimple_call_fndecl (call);
tree fntype;
@ -1646,7 +1643,7 @@ chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
bool use_fntype = false;
tree op;
ssa_op_iter iter;
gimple new_call;
gcall *new_call;
/* Do nothing for internal functions. */
if (gimple_call_internal_p (call))
@ -2085,7 +2082,7 @@ chkp_get_nonpointer_load_bounds (void)
/* Build bounds returned by CALL. */
static tree
chkp_build_returned_bound (gimple call)
chkp_build_returned_bound (gcall *call)
{
gimple_stmt_iterator gsi;
tree bounds;
@ -2191,7 +2188,7 @@ chkp_build_returned_bound (gimple call)
/* Return bounds used as returned by call
which produced SSA name VAL. */
gimple
gcall *
chkp_retbnd_call_by_val (tree val)
{
if (TREE_CODE (val) != SSA_NAME)
@ -2204,7 +2201,7 @@ chkp_retbnd_call_by_val (tree val)
FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
&& gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
return USE_STMT (use_p);
return as_a <gcall *> (USE_STMT (use_p));
return NULL;
}
@ -2599,11 +2596,11 @@ chkp_compute_bounds_for_assignment (tree node, gimple assign)
Return computed bounds. */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
gimple_stmt_iterator *iter)
gphi_iterator *iter)
{
tree var, bounds;
enum gimple_code code = gimple_code (def_stmt);
gimple stmt;
gphi *stmt;
if (dump_file && (dump_flags & TDF_DETAILS))
{
@ -2664,7 +2661,7 @@ chkp_get_bounds_by_definition (tree node, gimple def_stmt,
break;
case GIMPLE_CALL:
bounds = chkp_build_returned_bound (def_stmt);
bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
break;
case GIMPLE_PHI:
@ -2679,7 +2676,7 @@ chkp_get_bounds_by_definition (tree node, gimple def_stmt,
var = chkp_get_tmp_var ();
stmt = create_phi_node (var, gimple_bb (def_stmt));
bounds = gimple_phi_result (stmt);
*iter = gsi_for_stmt (stmt);
*iter = gsi_for_phi (stmt);
bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
@ -3389,21 +3386,21 @@ chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
if (!bounds)
{
gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
gimple_stmt_iterator phi_iter;
gphi_iterator phi_iter;
bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
gcc_assert (bounds);
if (gimple_code (def_stmt) == GIMPLE_PHI)
if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
{
unsigned i;
for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
for (i = 0; i < gimple_phi_num_args (def_phi); i++)
{
tree arg = gimple_phi_arg_def (def_stmt, i);
tree arg = gimple_phi_arg_def (def_phi, i);
tree arg_bnd;
gimple phi_bnd;
gphi *phi_bnd;
arg_bnd = chkp_find_bounds (arg, NULL);
@ -3413,10 +3410,10 @@ chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
Previous call to chkp_find_bounds could create
new basic block and therefore change phi statement
phi_iter points to. */
phi_bnd = gsi_stmt (phi_iter);
phi_bnd = phi_iter.phi ();
add_phi_arg (phi_bnd, arg_bnd,
gimple_phi_arg_edge (def_stmt, i),
gimple_phi_arg_edge (def_phi, i),
UNKNOWN_LOCATION);
}
@ -3874,7 +3871,9 @@ chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
|| fndecl == chkp_bndldx_fndecl
|| fndecl == chkp_ret_bnd_fndecl);
new_edge = edge->caller->create_edge (callee, stmt, edge->count,
new_edge = edge->caller->create_edge (callee,
as_a <gcall *> (stmt),
edge->count,
edge->frequency);
new_edge->frequency = compute_call_stmt_bb_frequency
(edge->caller->decl, gimple_bb (stmt));
@ -4036,18 +4035,21 @@ chkp_instrument_function (void)
break;
case GIMPLE_RETURN:
if (gimple_return_retval (s) != NULL_TREE)
{
chkp_process_stmt (&i, gimple_return_retval (s),
gimple_location (s),
integer_zero_node,
NULL_TREE, NULL_TREE, safe);
{
greturn *r = as_a <greturn *> (s);
if (gimple_return_retval (r) != NULL_TREE)
{
chkp_process_stmt (&i, gimple_return_retval (r),
gimple_location (r),
integer_zero_node,
NULL_TREE, NULL_TREE, safe);
/* Additionally we need to add bounds
to return statement. */
chkp_add_bounds_to_ret_stmt (&i);
}
break;
/* Additionally we need to add bounds
to return statement. */
chkp_add_bounds_to_ret_stmt (&i);
}
}
break;
case GIMPLE_CALL:
chkp_add_bounds_to_call_stmt (&i);
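chkp_get_bounds_by_definition above now records the position of a freshly built PHI with gsi_for_phi rather than a generic statement iterator. A small sketch under the assumption that, as in this branch, create_phi_node returns a gphi * and gsi_for_phi takes a gphi * and returns a gphi_iterator; new_phi_with_iter is a hypothetical helper.

static gphi *
new_phi_with_iter (tree var, basic_block bb, gphi_iterator *iter)
{
  gphi *phi = create_phi_node (var, bb);
  /* Remember where the PHI lives so arguments can be added later,
     as the code above does for bound PHIs.  */
  *iter = gsi_for_phi (phi);
  return phi;
}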


@ -46,7 +46,7 @@ extern tree chkp_build_bndstx_call (tree addr, tree ptr, tree bounds);
extern void chkp_find_bound_slots (const_tree type, bitmap res);
extern void chkp_build_bndstx (tree addr, tree ptr, tree bounds,
gimple_stmt_iterator *gsi);
extern gimple chkp_retbnd_call_by_val (tree val);
extern gcall *chkp_retbnd_call_by_val (tree val);
extern bool chkp_function_instrumented_p (tree fndecl);
extern void chkp_function_mark_instrumented (tree fndecl);
extern void chkp_copy_bounds_for_assign (gimple assign,


@ -211,20 +211,20 @@ static bool
init_dont_simulate_again (void)
{
basic_block bb;
gimple_stmt_iterator gsi;
gimple phi;
bool saw_a_complex_op = false;
FOR_EACH_BB_FN (bb, cfun)
{
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
prop_set_simulate_again (phi,
is_complex_reg (gimple_phi_result (phi)));
}
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple stmt;
tree op0, op1;
@ -409,7 +409,7 @@ complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
/* Evaluate a PHI node against the complex lattice defined above. */
static enum ssa_prop_result
complex_visit_phi (gimple phi)
complex_visit_phi (gphi *phi)
{
complex_lattice_t new_l, old_l;
unsigned int ver;
@ -732,11 +732,11 @@ update_parameter_components (void)
static void
update_phi_components (basic_block bb)
{
gimple_stmt_iterator gsi;
gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
if (is_complex_reg (gimple_phi_result (phi)))
{
@ -865,7 +865,7 @@ expand_complex_move (gimple_stmt_iterator *gsi, tree type)
stmt = gsi_stmt (*gsi);
gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
gimple_return_set_retval (stmt, lhs);
gimple_return_set_retval (as_a <greturn *> (stmt), lhs);
}
update_stmt (stmt);
@ -960,7 +960,8 @@ expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
machine_mode mode;
enum built_in_function bcode;
tree fn, type, lhs;
gimple old_stmt, stmt;
gimple old_stmt;
gcall *stmt;
old_stmt = gsi_stmt (*gsi);
lhs = gimple_assign_lhs (old_stmt);
@ -1400,8 +1401,11 @@ expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
switch (gimple_code (stmt))
{
case GIMPLE_RETURN:
type = TREE_TYPE (gimple_return_retval (stmt));
gimple_return_set_retval (stmt, fold_convert (type, cc));
{
greturn *return_stmt = as_a <greturn *> (stmt);
type = TREE_TYPE (gimple_return_retval (return_stmt));
gimple_return_set_retval (return_stmt, fold_convert (type, cc));
}
break;
case GIMPLE_ASSIGN:
@ -1411,9 +1415,12 @@ expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
break;
case GIMPLE_COND:
gimple_cond_set_code (stmt, EQ_EXPR);
gimple_cond_set_lhs (stmt, cc);
gimple_cond_set_rhs (stmt, boolean_true_node);
{
gcond *cond_stmt = as_a <gcond *> (stmt);
gimple_cond_set_code (cond_stmt, EQ_EXPR);
gimple_cond_set_lhs (cond_stmt, cc);
gimple_cond_set_rhs (cond_stmt, boolean_true_node);
}
break;
default:
@ -1428,7 +1435,7 @@ expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
static void
expand_complex_asm (gimple_stmt_iterator *gsi)
{
gimple stmt = gsi_stmt (*gsi);
gasm *stmt = as_a <gasm *> (gsi_stmt (*gsi));
unsigned int i;
for (i = 0; i < gimple_asm_noutputs (stmt); ++i)


@ -4399,7 +4399,8 @@ get_references_in_stmt (gimple stmt, vec<data_ref_loc, va_heap> *references)
clobbers_memory = true;
}
else if (stmt_code == GIMPLE_ASM
&& (gimple_asm_volatile_p (stmt) || gimple_vuse (stmt)))
&& (gimple_asm_volatile_p (as_a <gasm *> (stmt))
|| gimple_vuse (stmt)))
clobbers_memory = true;
if (!gimple_vuse (stmt))


@ -241,7 +241,7 @@ dump_dfa_stats (FILE *file)
fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
SCALE (size), LABEL (size));
size = dfa_stats.num_phis * sizeof (struct gimple_statement_phi);
size = dfa_stats.num_phis * sizeof (struct gphi);
total += size;
fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
SCALE (size), LABEL (size));
@ -290,18 +290,18 @@ collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
/* Walk all the statements in the function counting references. */
FOR_EACH_BB_FN (bb, cfun)
{
gimple_stmt_iterator si;
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
gsi_next (&si))
{
gimple phi = gsi_stmt (si);
gphi *phi = si.phi ();
dfa_stats_p->num_phis++;
dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
}
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
gimple stmt = gsi_stmt (si);
dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);


@ -190,7 +190,7 @@ struct finally_tree_node
tree) leaves the TRY block, it's necessary to record a tree in
this field. Thus a treemple is used. */
treemple child;
gimple parent;
gtry *parent;
};
/* Hashtable helpers. */
@ -219,7 +219,7 @@ finally_tree_hasher::equal (const value_type *v, const compare_type *c)
static hash_table<finally_tree_hasher> *finally_tree;
static void
record_in_finally_tree (treemple child, gimple parent)
record_in_finally_tree (treemple child, gtry *parent)
{
struct finally_tree_node *n;
finally_tree_node **slot;
@ -234,13 +234,13 @@ record_in_finally_tree (treemple child, gimple parent)
}
static void
collect_finally_tree (gimple stmt, gimple region);
collect_finally_tree (gimple stmt, gtry *region);
/* Go through the gimple sequence. Works with collect_finally_tree to
record all GIMPLE_LABEL and GIMPLE_TRY statements. */
static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
collect_finally_tree_1 (gimple_seq seq, gtry *region)
{
gimple_stmt_iterator gsi;
@ -249,14 +249,14 @@ collect_finally_tree_1 (gimple_seq seq, gimple region)
}
static void
collect_finally_tree (gimple stmt, gimple region)
collect_finally_tree (gimple stmt, gtry *region)
{
treemple temp;
switch (gimple_code (stmt))
{
case GIMPLE_LABEL:
temp.t = gimple_label_label (stmt);
temp.t = gimple_label_label (as_a <glabel *> (stmt));
record_in_finally_tree (temp, region);
break;
@ -265,7 +265,8 @@ collect_finally_tree (gimple stmt, gimple region)
{
temp.g = stmt;
record_in_finally_tree (temp, region);
collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
collect_finally_tree_1 (gimple_try_eval (stmt),
as_a <gtry *> (stmt));
collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
}
else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
@ -276,7 +277,9 @@ collect_finally_tree (gimple stmt, gimple region)
break;
case GIMPLE_CATCH:
collect_finally_tree_1 (gimple_catch_handler (stmt), region);
collect_finally_tree_1 (gimple_catch_handler (
as_a <gcatch *> (stmt)),
region);
break;
case GIMPLE_EH_FILTER:
@ -284,8 +287,11 @@ collect_finally_tree (gimple stmt, gimple region)
break;
case GIMPLE_EH_ELSE:
collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);
{
geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
}
break;
default:
@ -377,8 +383,8 @@ struct leh_tf_state
try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
this so that outside_finally_tree can reliably reference the tree used
in the collect_finally_tree data structures. */
gimple try_finally_expr;
gimple top_p;
gtry *try_finally_expr;
gtry *top_p;
/* While lowering a top_p usually it is expanded into multiple statements,
thus we need the following field to store them. */
@ -418,7 +424,7 @@ struct leh_tf_state
bool may_throw;
};
static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);
static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);
/* Search for STMT in the goto queue. Return the replacement,
or null if the statement isn't in the queue. */
@ -532,14 +538,21 @@ replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
break;
case GIMPLE_CATCH:
replace_goto_queue_stmt_list (gimple_catch_handler_ptr (stmt), tf);
replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
as_a <gcatch *> (stmt)),
tf);
break;
case GIMPLE_EH_FILTER:
replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
break;
case GIMPLE_EH_ELSE:
replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (stmt), tf);
replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (stmt), tf);
{
geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
tf);
replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
tf);
}
break;
default:
@ -671,12 +684,17 @@ maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
switch (gimple_code (stmt))
{
case GIMPLE_COND:
new_stmt.tp = gimple_op_ptr (stmt, 2);
record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt),
EXPR_LOCATION (*new_stmt.tp));
new_stmt.tp = gimple_op_ptr (stmt, 3);
record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt),
EXPR_LOCATION (*new_stmt.tp));
{
gcond *cond_stmt = as_a <gcond *> (stmt);
new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
record_in_goto_queue_label (tf, new_stmt,
gimple_cond_true_label (cond_stmt),
EXPR_LOCATION (*new_stmt.tp));
new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
record_in_goto_queue_label (tf, new_stmt,
gimple_cond_false_label (cond_stmt),
EXPR_LOCATION (*new_stmt.tp));
}
break;
case GIMPLE_GOTO:
new_stmt.g = stmt;
@ -702,7 +720,8 @@ maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
verify_norecord_switch_expr (struct leh_state *state,
gswitch *switch_expr)
{
struct leh_tf_state *tf = state->tf;
size_t i, n;
@ -768,7 +787,7 @@ static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
struct leh_tf_state *tf)
{
gimple x;
ggoto *x;
gcc_assert (q->is_label);
@ -788,7 +807,7 @@ static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
eh_landing_pad lp = region->landing_pads;
gimple x;
glabel *x;
if (lp == NULL)
lp = gen_eh_landing_pad (region);
@ -805,7 +824,7 @@ emit_post_landing_pad (gimple_seq *seq, eh_region region)
static void
emit_resx (gimple_seq *seq, eh_region region)
{
gimple x = gimple_build_resx (region->index);
gresx *x = gimple_build_resx (region->index);
gimple_seq_add_stmt (seq, x);
if (region->outer)
record_stmt_eh_region (region->outer, x);
@ -816,7 +835,7 @@ emit_resx (gimple_seq *seq, eh_region region)
static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
gimple x = gimple_build_eh_dispatch (region->index);
geh_dispatch *x = gimple_build_eh_dispatch (region->index);
gimple_seq_add_stmt (seq, x);
}
@ -861,7 +880,7 @@ eh_region_may_contain_throw (eh_region r)
an existing label that should be put at the exit, or NULL. */
static gimple_seq
frob_into_branch_around (gimple tp, eh_region region, tree over)
frob_into_branch_around (gtry *tp, eh_region region, tree over)
{
gimple x;
gimple_seq cleanup, result;
@ -898,7 +917,7 @@ static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
location_t loc)
{
gimple region = NULL;
gtry *region = NULL;
gimple_seq new_seq;
gimple_stmt_iterator gsi;
@ -948,14 +967,14 @@ lower_try_finally_fallthru_label (struct leh_tf_state *tf)
/* A subroutine of lower_try_finally. If FINALLY consists of a
GIMPLE_EH_ELSE node, return it. */
static inline gimple
static inline geh_else *
get_eh_else (gimple_seq finally)
{
gimple x = gimple_seq_first_stmt (finally);
if (gimple_code (x) == GIMPLE_EH_ELSE)
{
gcc_assert (gimple_seq_singleton_p (finally));
return x;
return as_a <geh_else *> (x);
}
return NULL;
}
@ -989,7 +1008,10 @@ honor_protect_cleanup_actions (struct leh_state *outer_state,
gimple_stmt_iterator gsi;
bool finally_may_fallthru;
gimple_seq finally;
gimple x, eh_else;
gimple x;
geh_mnt *eh_mnt;
gtry *try_stmt;
geh_else *eh_else;
/* First check for nothing to do. */
if (lang_hooks.eh_protect_cleanup_actions == NULL)
@ -1031,10 +1053,10 @@ honor_protect_cleanup_actions (struct leh_state *outer_state,
}
/* Wrap the block with protect_cleanup_actions as the action. */
x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
GIMPLE_TRY_CATCH);
finally = lower_eh_must_not_throw (outer_state, x);
eh_mnt = gimple_build_eh_must_not_throw (protect_cleanup_actions);
try_stmt = gimple_build_try (finally, gimple_seq_alloc_with_stmt (eh_mnt),
GIMPLE_TRY_CATCH);
finally = lower_eh_must_not_throw (outer_state, try_stmt);
/* Drop all of this into the exception sequence. */
emit_post_landing_pad (&eh_seq, tf->region);
@ -1057,7 +1079,8 @@ lower_try_finally_nofallthru (struct leh_state *state,
struct leh_tf_state *tf)
{
tree lab;
gimple x, eh_else;
gimple x;
geh_else *eh_else;
gimple_seq finally;
struct goto_queue_node *q, *qe;
@ -1121,6 +1144,8 @@ static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
struct goto_queue_node *q, *qe;
geh_else *eh_else;
glabel *label_stmt;
gimple x;
gimple_seq finally;
gimple_stmt_iterator gsi;
@ -1133,13 +1158,13 @@ lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
/* Since there's only one destination, and the destination edge can only
either be EH or non-EH, that implies that all of our incoming edges
are of the same type. Therefore we can lower EH_ELSE immediately. */
x = get_eh_else (finally);
if (x)
eh_else = get_eh_else (finally);
if (eh_else)
{
if (tf->may_throw)
finally = gimple_eh_else_e_body (x);
finally = gimple_eh_else_e_body (eh_else);
else
finally = gimple_eh_else_n_body (x);
finally = gimple_eh_else_n_body (eh_else);
}
lower_eh_constructs_1 (state, &finally);
@ -1174,8 +1199,8 @@ lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
}
finally_label = create_artificial_label (loc);
x = gimple_build_label (finally_label);
gimple_seq_add_stmt (&tf->top_p_seq, x);
label_stmt = gimple_build_label (finally_label);
gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);
gimple_seq_add_seq (&tf->top_p_seq, finally);
@ -1223,7 +1248,8 @@ lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
gimple_seq finally;
gimple_seq new_stmt;
gimple_seq seq;
gimple x, eh_else;
gimple x;
geh_else *eh_else;
tree tmp;
location_t tf_loc = gimple_location (tf->try_finally_expr);
@ -1356,7 +1382,8 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
tree last_case;
vec<tree> case_label_vec;
gimple_seq switch_body = NULL;
gimple x, eh_else;
gimple x;
geh_else *eh_else;
tree tmp;
gimple switch_stmt;
gimple_seq finally;
@ -1568,7 +1595,7 @@ static bool
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
int f_estimate, sw_estimate;
gimple eh_else;
geh_else *eh_else;
/* If there's an EH_ELSE involved, the exception path is separate
and really doesn't come into play for this computation. */
@ -1634,7 +1661,7 @@ cleanup_is_dead_in (eh_region reg)
arrange for the FINALLY block to be executed on all exits. */
static gimple_seq
lower_try_finally (struct leh_state *state, gimple tp)
lower_try_finally (struct leh_state *state, gtry *tp)
{
struct leh_tf_state this_tf;
struct leh_state this_state;
@ -1741,7 +1768,7 @@ lower_try_finally (struct leh_state *state, gimple tp)
exception region trees that records all the magic. */
static gimple_seq
lower_catch (struct leh_state *state, gimple tp)
lower_catch (struct leh_state *state, gtry *tp)
{
eh_region try_region = NULL;
struct leh_state this_state = *state;
@ -1776,13 +1803,13 @@ lower_catch (struct leh_state *state, gimple tp)
gsi_next (&gsi))
{
eh_catch c;
gimple gcatch;
gcatch *catch_stmt;
gimple_seq handler;
gcatch = gsi_stmt (gsi);
c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));
catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));
handler = gimple_catch_handler (gcatch);
handler = gimple_catch_handler (catch_stmt);
lower_eh_constructs_1 (&this_state, &handler);
c->label = create_artificial_label (UNKNOWN_LOCATION);
@ -1813,7 +1840,7 @@ lower_catch (struct leh_state *state, gimple tp)
region trees that record all the magic. */
static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
lower_eh_filter (struct leh_state *state, gtry *tp)
{
struct leh_state this_state = *state;
eh_region this_region = NULL;
@ -1858,7 +1885,7 @@ lower_eh_filter (struct leh_state *state, gimple tp)
plus the exception region trees that record all the magic. */
static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gimple tp)
lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
{
struct leh_state this_state = *state;
@ -1869,7 +1896,8 @@ lower_eh_must_not_throw (struct leh_state *state, gimple tp)
this_region = gen_eh_region_must_not_throw (state->cur_region);
this_region->u.must_not_throw.failure_decl
= gimple_eh_must_not_throw_fndecl (inner);
= gimple_eh_must_not_throw_fndecl (
as_a <geh_mnt *> (inner));
this_region->u.must_not_throw.failure_loc
= LOCATION_LOCUS (gimple_location (tp));
@ -1890,7 +1918,7 @@ lower_eh_must_not_throw (struct leh_state *state, gimple tp)
except that we only execute the cleanup block for exception edges. */
static gimple_seq
lower_cleanup (struct leh_state *state, gimple tp)
lower_cleanup (struct leh_state *state, gtry *tp)
{
struct leh_state this_state = *state;
eh_region this_region = NULL;
@ -2041,40 +2069,43 @@ lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
break;
case GIMPLE_SWITCH:
verify_norecord_switch_expr (state, stmt);
verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
break;
case GIMPLE_TRY:
if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
replace = lower_try_finally (state, stmt);
else
{
x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
if (!x)
{
replace = gimple_try_eval (stmt);
lower_eh_constructs_1 (state, &replace);
}
else
switch (gimple_code (x))
{
gtry *try_stmt = as_a <gtry *> (stmt);
if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
replace = lower_try_finally (state, try_stmt);
else
{
x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
if (!x)
{
case GIMPLE_CATCH:
replace = lower_catch (state, stmt);
break;
case GIMPLE_EH_FILTER:
replace = lower_eh_filter (state, stmt);
break;
case GIMPLE_EH_MUST_NOT_THROW:
replace = lower_eh_must_not_throw (state, stmt);
break;
case GIMPLE_EH_ELSE:
/* This code is only valid with GIMPLE_TRY_FINALLY. */
gcc_unreachable ();
default:
replace = lower_cleanup (state, stmt);
break;
replace = gimple_try_eval (try_stmt);
lower_eh_constructs_1 (state, &replace);
}
}
else
switch (gimple_code (x))
{
case GIMPLE_CATCH:
replace = lower_catch (state, try_stmt);
break;
case GIMPLE_EH_FILTER:
replace = lower_eh_filter (state, try_stmt);
break;
case GIMPLE_EH_MUST_NOT_THROW:
replace = lower_eh_must_not_throw (state, try_stmt);
break;
case GIMPLE_EH_ELSE:
/* This code is only valid with GIMPLE_TRY_FINALLY. */
gcc_unreachable ();
default:
replace = lower_cleanup (state, try_stmt);
break;
}
}
}
/* Remove the old stmt and insert the transformed sequence
instead. */
@ -2190,7 +2221,7 @@ make_pass_lower_eh (gcc::context *ctxt)
no fallthru edge; false if there is. */
bool
make_eh_dispatch_edges (gimple stmt)
make_eh_dispatch_edges (geh_dispatch *stmt)
{
eh_region r;
eh_catch c;
@ -2348,7 +2379,7 @@ redirect_eh_edge (edge edge_in, basic_block new_bb)
The actual edge update will happen in the caller. */
void
redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
{
tree new_lab = gimple_block_label (new_bb);
bool any_changed = false;
@ -2763,7 +2794,7 @@ stmt_could_throw_p (gimple stmt)
return true;
case GIMPLE_CALL:
return !gimple_call_nothrow_p (stmt);
return !gimple_call_nothrow_p (as_a <gcall *> (stmt));
case GIMPLE_ASSIGN:
case GIMPLE_COND:
@ -2774,7 +2805,7 @@ stmt_could_throw_p (gimple stmt)
case GIMPLE_ASM:
if (!cfun->can_throw_non_call_exceptions)
return false;
return gimple_asm_volatile_p (stmt);
return gimple_asm_volatile_p (as_a <gasm *> (stmt));
default:
return false;
@ -3000,7 +3031,7 @@ same_handler_p (gimple_seq oneh, gimple_seq twoh)
temporary used in the initializer for A. */
static void
optimize_double_finally (gimple one, gimple two)
optimize_double_finally (gtry *one, gtry *two)
{
gimple oneh;
gimple_stmt_iterator gsi;
@ -3047,13 +3078,12 @@ refactor_eh_r (gimple_seq seq)
two = NULL;
else
two = gsi_stmt (gsi);
if (one
&& two
&& gimple_code (one) == GIMPLE_TRY
&& gimple_code (two) == GIMPLE_TRY
&& gimple_try_kind (one) == GIMPLE_TRY_FINALLY
&& gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
optimize_double_finally (one, two);
if (one && two)
if (gtry *try_one = dyn_cast <gtry *> (one))
if (gtry *try_two = dyn_cast <gtry *> (two))
if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
&& gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
optimize_double_finally (try_one, try_two);
if (one)
switch (gimple_code (one))
{
@ -3062,14 +3092,17 @@ refactor_eh_r (gimple_seq seq)
refactor_eh_r (gimple_try_cleanup (one));
break;
case GIMPLE_CATCH:
refactor_eh_r (gimple_catch_handler (one));
refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
break;
case GIMPLE_EH_FILTER:
refactor_eh_r (gimple_eh_filter_failure (one));
break;
case GIMPLE_EH_ELSE:
refactor_eh_r (gimple_eh_else_n_body (one));
refactor_eh_r (gimple_eh_else_e_body (one));
{
geh_else *eh_else_stmt = as_a <geh_else *> (one);
refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
}
break;
default:
break;
@ -3124,7 +3157,8 @@ make_pass_refactor_eh (gcc::context *ctxt)
/* At the end of gimple optimization, we can lower RESX. */
static bool
lower_resx (basic_block bb, gimple stmt, hash_map<eh_region, tree> *mnt_map)
lower_resx (basic_block bb, gresx *stmt,
hash_map<eh_region, tree> *mnt_map)
{
int lp_nr;
eh_region src_r, dst_r;
@ -3312,7 +3346,8 @@ pass_lower_resx::execute (function *fun)
gimple last = last_stmt (bb);
if (last && is_gimple_resx (last))
{
dominance_invalidated |= lower_resx (bb, last, &mnt_map);
dominance_invalidated |=
lower_resx (bb, as_a <gresx *> (last), &mnt_map);
any_rewritten = true;
}
}
@ -3436,14 +3471,15 @@ sink_clobbers (basic_block bb)
/* See if there is a virtual PHI node to take an updated virtual
operand from. */
gimple vphi = NULL;
gphi *vphi = NULL;
tree vuse = NULL_TREE;
for (gsi = gsi_start_phis (succbb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gpi = gsi_start_phis (succbb);
!gsi_end_p (gpi); gsi_next (&gpi))
{
tree res = gimple_phi_result (gsi_stmt (gsi));
tree res = gimple_phi_result (gpi.phi ());
if (virtual_operand_p (res))
{
vphi = gsi_stmt (gsi);
vphi = gpi.phi ();
vuse = res;
break;
}
@ -3521,7 +3557,7 @@ sink_clobbers (basic_block bb)
we have found some duplicate labels and removed some edges. */
static bool
lower_eh_dispatch (basic_block src, gimple stmt)
lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
{
gimple_stmt_iterator gsi;
int region_nr;
@ -3709,7 +3745,8 @@ pass_lower_eh_dispatch::execute (function *fun)
continue;
if (gimple_code (last) == GIMPLE_EH_DISPATCH)
{
redirected |= lower_eh_dispatch (bb, last);
redirected |= lower_eh_dispatch (bb,
as_a <geh_dispatch *> (last));
flags |= TODO_update_ssa_only_virtuals;
}
else if (gimple_code (last) == GIMPLE_RESX)
@ -3797,10 +3834,13 @@ mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
switch (gimple_code (stmt))
{
case GIMPLE_RESX:
bitmap_set_bit (r_reachable, gimple_resx_region (stmt));
bitmap_set_bit (r_reachable,
gimple_resx_region (as_a <gresx *> (stmt)));
break;
case GIMPLE_EH_DISPATCH:
bitmap_set_bit (r_reachable, gimple_eh_dispatch_region (stmt));
bitmap_set_bit (r_reachable,
gimple_eh_dispatch_region (
as_a <geh_dispatch *> (stmt)));
break;
default:
break;
@ -3971,13 +4011,13 @@ unsplit_eh (eh_landing_pad lp)
for a different region. */
for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
tree lab;
int lp_nr;
if (gimple_code (stmt) != GIMPLE_LABEL)
if (!label_stmt)
break;
lab = gimple_label_label (stmt);
lab = gimple_label_label (label_stmt);
lp_nr = EH_LANDING_PAD_NR (lab);
if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
return false;
@ -3994,9 +4034,10 @@ unsplit_eh (eh_landing_pad lp)
that doesn't appear to handle virtuals. Propagate by hand. */
if (!gimple_seq_empty_p (phi_nodes (bb)))
{
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
{
gimple use_stmt, phi = gsi_stmt (gsi);
gimple use_stmt;
gphi *phi = gpi.phi ();
tree lhs = gimple_phi_result (phi);
tree rhs = gimple_phi_arg_def (phi, 0);
use_operand_p use_p;
@ -4011,7 +4052,7 @@ unsplit_eh (eh_landing_pad lp)
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
remove_phi_node (&gsi, true);
remove_phi_node (&gpi, true);
}
}
@ -4060,7 +4101,7 @@ static bool
cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
edge old_bb_out, bool change_region)
{
gimple_stmt_iterator ngsi, ogsi;
gphi_iterator ngsi, ogsi;
edge_iterator ei;
edge e;
bitmap ophi_handled;
@ -4090,7 +4131,7 @@ cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
for the edges we're going to move. */
for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
{
gimple ophi, nphi = gsi_stmt (ngsi);
gphi *ophi, *nphi = ngsi.phi ();
tree nresult, nop;
nresult = gimple_phi_result (nphi);
@ -4101,7 +4142,7 @@ cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
ophi = NULL;
for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
{
ophi = gsi_stmt (ogsi);
ophi = ogsi.phi ();
if (gimple_phi_result (ophi) == nop)
break;
ophi = NULL;
@ -4154,7 +4195,7 @@ cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
we don't know what values from the other edges into NEW_BB to use. */
for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
{
gimple ophi = gsi_stmt (ogsi);
gphi *ophi = ogsi.phi ();
tree oresult = gimple_phi_result (ophi);
if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
goto fail;
@ -4242,10 +4283,10 @@ cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
lab = NULL;
for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
int lp_nr;
if (gimple_code (stmt) != GIMPLE_LABEL)
if (!stmt)
break;
lab = gimple_label_label (stmt);
lp_nr = EH_LANDING_PAD_NR (lab);
@ -4642,7 +4683,7 @@ verify_eh_edges (gimple stmt)
/* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
DEBUG_FUNCTION bool
verify_eh_dispatch_edge (gimple stmt)
verify_eh_dispatch_edge (geh_dispatch *stmt)
{
eh_region r;
eh_catch c;

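(Throughout the exception-handling hunks above, statements are narrowed from gimple to their subclasses with as_a <T *> when the statement kind is already guaranteed, and with dyn_cast <T *> when it still has to be tested, as in refactor_eh_r. The fragment below is an illustrative sketch of the two forms, not part of the merged diff; it assumes the GCC-internal headers, and handle_eh_stmt_sketch is a made-up name.)

/* dyn_cast returns NULL when STMT is not of the requested kind;
   as_a asserts (in checked builds) that it is.  */
static void
handle_eh_stmt_sketch (gimple stmt)
{
  if (gtry *try_stmt = dyn_cast <gtry *> (stmt))
    {
      if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
        ;  /* ... lower the try/finally ...  */
    }
  else if (gimple_code (stmt) == GIMPLE_LABEL)
    {
      /* The kind is known here, so the checked cast is safe.  */
      glabel *label_stmt = as_a <glabel *> (stmt);
      tree lab = gimple_label_label (label_stmt);
      (void) lab;
    }
}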

@ -30,10 +30,10 @@ extern bool remove_stmt_from_eh_lp_fn (struct function *, gimple);
extern bool remove_stmt_from_eh_lp (gimple);
extern int lookup_stmt_eh_lp_fn (struct function *, gimple);
extern int lookup_stmt_eh_lp (gimple);
extern bool make_eh_dispatch_edges (gimple);
extern bool make_eh_dispatch_edges (geh_dispatch *);
extern void make_eh_edges (gimple);
extern edge redirect_eh_edge (edge, basic_block);
extern void redirect_eh_dispatch_edge (gimple, edge, basic_block);
extern void redirect_eh_dispatch_edge (geh_dispatch *, edge, basic_block);
extern bool operation_could_trap_helper_p (enum tree_code, bool, bool, bool,
bool, tree, bool *);
extern bool operation_could_trap_p (enum tree_code, bool, bool, tree);
@ -51,6 +51,6 @@ extern bool maybe_duplicate_eh_stmt_fn (struct function *, gimple,
extern bool maybe_duplicate_eh_stmt (gimple, gimple);
extern void maybe_remove_unreachable_handlers (void);
extern bool verify_eh_edges (gimple);
extern bool verify_eh_dispatch_edge (gimple);
extern bool verify_eh_dispatch_edge (geh_dispatch *);
#endif /* GCC_TREE_EH_H */


@ -419,7 +419,7 @@ gen_emutls_addr (tree decl, struct lower_emutls_data *d)
{
varpool_node *cvar;
tree cdecl;
gimple x;
gcall *x;
cvar = data->control_var;
cdecl = cvar->decl;
@ -569,7 +569,8 @@ lower_emutls_stmt (gimple stmt, struct lower_emutls_data *d)
/* Lower the I'th operand of PHI. */
static void
lower_emutls_phi_arg (gimple phi, unsigned int i, struct lower_emutls_data *d)
lower_emutls_phi_arg (gphi *phi, unsigned int i,
struct lower_emutls_data *d)
{
struct walk_stmt_info wi;
struct phi_arg_d *pd = gimple_phi_arg (phi, i);
@ -630,7 +631,6 @@ lower_emutls_function_body (struct cgraph_node *node)
FOR_EACH_BB_FN (d.bb, cfun)
{
gimple_stmt_iterator gsi;
unsigned int i, nedge;
/* Lower each of the PHI nodes of the block, as we may have
@ -652,10 +652,10 @@ lower_emutls_function_body (struct cgraph_node *node)
clear_access_vars ();
d.seq = NULL;
for (gsi = gsi_start_phis (d.bb);
for (gphi_iterator gsi = gsi_start_phis (d.bb);
!gsi_end_p (gsi);
gsi_next (&gsi))
lower_emutls_phi_arg (gsi_stmt (gsi), i, &d);
lower_emutls_phi_arg (gsi.phi (), i, &d);
/* Insert all statements generated by all phi nodes for this
particular edge all at once. */
@ -673,7 +673,8 @@ lower_emutls_function_body (struct cgraph_node *node)
clear_access_vars ();
/* Lower each of the statements of the block. */
for (gsi = gsi_start_bb (d.bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gimple_stmt_iterator gsi = gsi_start_bb (d.bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
d.seq = NULL;
lower_emutls_stmt (gsi_stmt (gsi), &d);


@ -515,7 +515,7 @@ bb_with_exit_edge_p (struct loop *loop, basic_block bb)
- there is a virtual PHI in a BB other than the loop->header. */
static bool
if_convertible_phi_p (struct loop *loop, basic_block bb, gimple phi,
if_convertible_phi_p (struct loop *loop, basic_block bb, gphi *phi,
bool any_mask_load_store)
{
if (dump_file && (dump_flags & TDF_DETAILS))
@ -1282,10 +1282,10 @@ if_convertible_loop_p_1 (struct loop *loop,
for (i = 0; i < loop->num_nodes; i++)
{
basic_block bb = ifc_bbs[i];
gimple_stmt_iterator itr;
gphi_iterator itr;
for (itr = gsi_start_phis (bb); !gsi_end_p (itr); gsi_next (&itr))
if (!if_convertible_phi_p (loop, bb, gsi_stmt (itr),
if (!if_convertible_phi_p (loop, bb, itr.phi (),
*any_mask_load_store))
return false;
}
@ -1592,7 +1592,7 @@ convert_scalar_cond_reduction (gimple reduc, gimple_stmt_iterator *gsi,
TRUE_BB is selected. */
static void
predicate_scalar_phi (gimple phi, tree cond,
predicate_scalar_phi (gphi *phi, tree cond,
basic_block true_bb,
gimple_stmt_iterator *gsi)
{
@ -1667,9 +1667,10 @@ predicate_all_scalar_phis (struct loop *loop)
for (i = 1; i < orig_loop_num_nodes; i++)
{
gimple phi;
gphi *phi;
tree cond = NULL_TREE;
gimple_stmt_iterator gsi, phi_gsi;
gimple_stmt_iterator gsi;
gphi_iterator phi_gsi;
basic_block true_bb = NULL;
bb = ifc_bbs[i];
@ -1687,7 +1688,7 @@ predicate_all_scalar_phis (struct loop *loop)
while (!gsi_end_p (phi_gsi))
{
phi = gsi_stmt (phi_gsi);
phi = phi_gsi.phi ();
predicate_scalar_phi (phi, cond, true_bb, &gsi);
release_phi_node (phi);
gsi_next (&phi_gsi);


@ -808,7 +808,7 @@ remap_gimple_seq (gimple_seq body, copy_body_data *id)
block using the mapping information in ID. */
static gimple
copy_gimple_bind (gimple stmt, copy_body_data *id)
copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
gimple new_bind;
tree new_block, new_vars;
@ -1319,7 +1319,7 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
statement. */
if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
{
tree retval = gimple_return_retval (stmt);
tree retval = gimple_return_retval (as_a <greturn *> (stmt));
tree retbnd = gimple_return_retbnd (stmt);
tree bndslot = id->retbnd;
@ -1371,12 +1371,15 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
switch (gimple_code (stmt))
{
case GIMPLE_BIND:
copy = copy_gimple_bind (stmt, id);
copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
break;
case GIMPLE_CATCH:
s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
copy = gimple_build_catch (gimple_catch_types (stmt), s1);
{
gcatch *catch_stmt = as_a <gcatch *> (stmt);
s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
}
break;
case GIMPLE_EH_FILTER:
@ -1396,12 +1399,15 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
break;
case GIMPLE_OMP_PARALLEL:
s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
copy = gimple_build_omp_parallel
(s1,
gimple_omp_parallel_clauses (stmt),
gimple_omp_parallel_child_fn (stmt),
gimple_omp_parallel_data_arg (stmt));
{
gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
copy = gimple_build_omp_parallel
(s1,
gimple_omp_parallel_clauses (omp_par_stmt),
gimple_omp_parallel_child_fn (omp_par_stmt),
gimple_omp_parallel_data_arg (omp_par_stmt));
}
break;
case GIMPLE_OMP_TASK:
@ -1487,14 +1493,25 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
case GIMPLE_OMP_CRITICAL:
s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
copy
= gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
copy = gimple_build_omp_critical (s1,
gimple_omp_critical_name (
as_a <gomp_critical *> (stmt)));
break;
case GIMPLE_TRANSACTION:
s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
{
gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
gtransaction *new_trans_stmt;
s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
id);
copy = new_trans_stmt
= gimple_build_transaction (
s1,
gimple_transaction_label (old_trans_stmt));
gimple_transaction_set_subcode (
new_trans_stmt,
gimple_transaction_subcode (old_trans_stmt));
}
break;
default:
@ -1546,18 +1563,20 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
if (gimple_debug_bind_p (stmt))
{
copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
gimple_debug_bind_get_value (stmt),
stmt);
gdebug *copy
= gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
gimple_debug_bind_get_value (stmt),
stmt);
id->debug_stmts.safe_push (copy);
gimple_seq_add_stmt (&stmts, copy);
return stmts;
}
if (gimple_debug_source_bind_p (stmt))
{
copy = gimple_build_debug_source_bind
(gimple_debug_source_bind_get_var (stmt),
gimple_debug_source_bind_get_value (stmt), stmt);
gdebug *copy = gimple_build_debug_source_bind
(gimple_debug_source_bind_get_var (stmt),
gimple_debug_source_bind_get_value (stmt),
stmt);
id->debug_stmts.safe_push (copy);
gimple_seq_add_stmt (&stmts, copy);
return stmts;
@ -1567,9 +1586,9 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
copy = gimple_copy (stmt);
/* Clear flags that need revisiting. */
if (is_gimple_call (copy)
&& gimple_call_tail_p (copy))
gimple_call_set_tail (copy, false);
if (gcall *call_stmt = dyn_cast <gcall *> (copy))
if (gimple_call_tail_p (call_stmt))
gimple_call_set_tail (call_stmt, false);
/* Remap the region numbers for __builtin_eh_{pointer,filter},
RESX and EH_DISPATCH. */
@ -1603,23 +1622,25 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
keep it valid over inlining by setting DECL_PT_UID. */
if (!id->src_cfun->gimple_df
|| !id->src_cfun->gimple_df->ipa_pta)
gimple_call_reset_alias_info (copy);
gimple_call_reset_alias_info (as_a <gcall *> (copy));
}
break;
case GIMPLE_RESX:
{
int r = gimple_resx_region (copy);
gresx *resx_stmt = as_a <gresx *> (copy);
int r = gimple_resx_region (resx_stmt);
r = remap_eh_region_nr (r, id);
gimple_resx_set_region (copy, r);
gimple_resx_set_region (resx_stmt, r);
}
break;
case GIMPLE_EH_DISPATCH:
{
int r = gimple_eh_dispatch_region (copy);
geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
int r = gimple_eh_dispatch_region (eh_dispatch);
r = remap_eh_region_nr (r, id);
gimple_eh_dispatch_set_region (copy, r);
gimple_eh_dispatch_set_region (eh_dispatch, r);
}
break;
@ -1769,18 +1790,20 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
do
{
tree fn;
gcall *call_stmt;
stmt = gsi_stmt (copy_gsi);
if (is_gimple_call (stmt)
&& gimple_call_va_arg_pack_p (stmt)
&& id->gimple_call)
call_stmt = dyn_cast <gcall *> (stmt);
if (call_stmt
&& gimple_call_va_arg_pack_p (call_stmt)
&& id->call_stmt)
{
/* __builtin_va_arg_pack () should be replaced by
all arguments corresponding to ... in the caller. */
tree p;
gimple new_call;
gcall *new_call;
vec<tree> argarray;
size_t nargs = gimple_call_num_args (id->gimple_call);
size_t nargs = gimple_call_num_args (id->call_stmt);
size_t n, i, nargs_to_copy;
bool remove_bounds = false;
@ -1791,73 +1814,73 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
we handle not instrumented call in instrumented
function. */
nargs_to_copy = nargs;
if (gimple_call_with_bounds_p (id->gimple_call)
if (gimple_call_with_bounds_p (id->call_stmt)
&& !gimple_call_with_bounds_p (stmt))
{
for (i = gimple_call_num_args (id->gimple_call) - nargs;
i < gimple_call_num_args (id->gimple_call);
for (i = gimple_call_num_args (id->call_stmt) - nargs;
i < gimple_call_num_args (id->call_stmt);
i++)
if (POINTER_BOUNDS_P (gimple_call_arg (id->gimple_call, i)))
if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
nargs_to_copy--;
remove_bounds = true;
}
/* Create the new array of arguments. */
n = nargs_to_copy + gimple_call_num_args (stmt);
n = nargs_to_copy + gimple_call_num_args (call_stmt);
argarray.create (n);
argarray.safe_grow_cleared (n);
/* Copy all the arguments before '...' */
memcpy (argarray.address (),
gimple_call_arg_ptr (stmt, 0),
gimple_call_num_args (stmt) * sizeof (tree));
gimple_call_arg_ptr (call_stmt, 0),
gimple_call_num_args (call_stmt) * sizeof (tree));
if (remove_bounds)
{
/* Append the rest of arguments removing bounds. */
unsigned cur = gimple_call_num_args (stmt);
i = gimple_call_num_args (id->gimple_call) - nargs;
for (i = gimple_call_num_args (id->gimple_call) - nargs;
i < gimple_call_num_args (id->gimple_call);
unsigned cur = gimple_call_num_args (call_stmt);
i = gimple_call_num_args (id->call_stmt) - nargs;
for (i = gimple_call_num_args (id->call_stmt) - nargs;
i < gimple_call_num_args (id->call_stmt);
i++)
if (!POINTER_BOUNDS_P (gimple_call_arg (id->gimple_call, i)))
argarray[cur++] = gimple_call_arg (id->gimple_call, i);
if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
argarray[cur++] = gimple_call_arg (id->call_stmt, i);
gcc_assert (cur == n);
}
else
{
/* Append the arguments passed in '...' */
memcpy (argarray.address () + gimple_call_num_args (stmt),
gimple_call_arg_ptr (id->gimple_call, 0)
+ (gimple_call_num_args (id->gimple_call) - nargs),
memcpy (argarray.address () + gimple_call_num_args (call_stmt),
gimple_call_arg_ptr (id->call_stmt, 0)
+ (gimple_call_num_args (id->call_stmt) - nargs),
nargs * sizeof (tree));
}
new_call = gimple_build_call_vec (gimple_call_fn (stmt),
new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
argarray);
argarray.release ();
/* Copy all GIMPLE_CALL flags, location and block, except
GF_CALL_VA_ARG_PACK. */
gimple_call_copy_flags (new_call, stmt);
gimple_call_copy_flags (new_call, call_stmt);
gimple_call_set_va_arg_pack (new_call, false);
gimple_set_location (new_call, gimple_location (stmt));
gimple_set_block (new_call, gimple_block (stmt));
gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
gsi_replace (&copy_gsi, new_call, false);
stmt = new_call;
}
else if (is_gimple_call (stmt)
&& id->gimple_call
&& id->call_stmt
&& (decl = gimple_call_fndecl (stmt))
&& DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
{
/* __builtin_va_arg_pack_len () should be replaced by
the number of anonymous arguments. */
size_t nargs = gimple_call_num_args (id->gimple_call), i;
size_t nargs = gimple_call_num_args (id->call_stmt), i;
tree count, p;
gimple new_stmt;
@ -1865,10 +1888,10 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
nargs--;
/* For instrumented calls we should ignore bounds. */
for (i = gimple_call_num_args (id->gimple_call) - nargs;
i < gimple_call_num_args (id->gimple_call);
for (i = gimple_call_num_args (id->call_stmt) - nargs;
i < gimple_call_num_args (id->call_stmt);
i++)
if (POINTER_BOUNDS_P (gimple_call_arg (id->gimple_call, i)))
if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
nargs--;
count = build_int_cst (integer_type_node, nargs);
@ -1893,7 +1916,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
/* We're duplicating a CALL_EXPR. Find any corresponding
callgraph edges and update or duplicate them. */
if (is_gimple_call (stmt))
if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
struct cgraph_edge *edge;
@ -1906,7 +1929,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
int edge_freq = edge->frequency;
int new_freq;
struct cgraph_edge *old_edge = edge;
edge = edge->clone (id->dst_node, stmt,
edge = edge->clone (id->dst_node, call_stmt,
gimple_uid (stmt),
REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
true);
@ -1925,7 +1948,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
gcc_assert (!edge->indirect_unknown_callee);
old_edge->speculative_call_info (direct, indirect, ref);
indirect = indirect->clone (id->dst_node, stmt,
indirect = indirect->clone (id->dst_node, call_stmt,
gimple_uid (stmt),
REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
true);
@ -1964,14 +1987,14 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
case CB_CGE_MOVE_CLONES:
id->dst_node->set_call_stmt_including_clones (orig_stmt,
stmt);
call_stmt);
edge = id->dst_node->get_edge (stmt);
break;
case CB_CGE_MOVE:
edge = id->dst_node->get_edge (orig_stmt);
if (edge)
edge->set_call_stmt (stmt);
edge->set_call_stmt (call_stmt);
break;
default:
@ -2000,12 +2023,12 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
|| !id->dst_node->definition);
if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
id->dst_node->create_edge_including_clones
(dest, orig_stmt, stmt, bb->count,
(dest, orig_stmt, call_stmt, bb->count,
compute_call_stmt_bb_frequency (id->dst_node->decl,
copy_basic_block),
CIF_ORIGINALLY_INDIRECT_CALL);
else
id->dst_node->create_edge (dest, stmt,
id->dst_node->create_edge (dest, call_stmt,
bb->count,
compute_call_stmt_bb_frequency
(id->dst_node->decl,
@ -2018,7 +2041,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
}
}
notice_special_calls (stmt);
notice_special_calls (as_a <gcall *> (stmt));
}
maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
@ -2073,8 +2096,8 @@ update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
if (!e->dest->aux
|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
{
gimple phi;
gimple_stmt_iterator si;
gphi *phi;
gphi_iterator si;
if (!nonlocal_goto)
gcc_assert (e->flags & EDGE_EH);
@ -2086,7 +2109,7 @@ update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
{
edge re;
phi = gsi_stmt (si);
phi = si.phi ();
/* For abnormal goto/call edges the receiver can be the
ENTRY_BLOCK. Do not assert this cannot happen. */
@ -2194,7 +2217,7 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
}
if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
make_eh_dispatch_edges (copy_stmt);
make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
else if (can_throw)
make_eh_edges (copy_stmt);
@ -2240,17 +2263,17 @@ copy_phis_for_bb (basic_block bb, copy_body_data *id)
{
basic_block const new_bb = (basic_block) bb->aux;
edge_iterator ei;
gimple phi;
gimple_stmt_iterator si;
gphi *phi;
gphi_iterator si;
edge new_edge;
bool inserted = false;
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
{
tree res, new_res;
gimple new_phi;
gphi *new_phi;
phi = gsi_stmt (si);
phi = si.phi ();
res = PHI_RESULT (phi);
new_res = res;
if (!virtual_operand_p (res))
@ -2421,7 +2444,8 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
while (is_gimple_debug (gsi_stmt (ssi)))
{
gimple stmt = gsi_stmt (ssi), new_stmt;
gimple stmt = gsi_stmt (ssi);
gdebug *new_stmt;
tree var;
tree value;
@ -2658,12 +2682,12 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
/* Now that we've duplicated the blocks, duplicate their edges. */
basic_block abnormal_goto_dest = NULL;
if (id->gimple_call
&& stmt_can_make_abnormal_goto (id->gimple_call))
if (id->call_stmt
&& stmt_can_make_abnormal_goto (id->call_stmt))
{
gimple_stmt_iterator gsi = gsi_for_stmt (id->gimple_call);
gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
bb = gimple_bb (id->gimple_call);
bb = gimple_bb (id->call_stmt);
gsi_next (&gsi);
if (gsi_end_p (gsi))
abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
@ -2749,7 +2773,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
this arises, we drop the VALUE expression altogether. */
static void
copy_debug_stmt (gimple stmt, copy_body_data *id)
copy_debug_stmt (gdebug *stmt, copy_body_data *id)
{
tree t, *n;
struct walk_stmt_info wi;
@ -2808,7 +2832,7 @@ copy_debug_stmt (gimple stmt, copy_body_data *id)
t = gimple_debug_source_bind_get_value (stmt);
if (t != NULL_TREE
&& TREE_CODE (t) == PARM_DECL
&& id->gimple_call)
&& id->call_stmt)
{
vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
unsigned int i;
@ -2841,7 +2865,7 @@ static void
copy_debug_stmts (copy_body_data *id)
{
size_t i;
gimple stmt;
gdebug *stmt;
if (!id->debug_stmts.exists ())
return;
@ -3512,7 +3536,7 @@ inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
VLA objects as those can't cause unbounded growth (they're always
wrapped inside stack_save/stack_restore regions. */
if (gimple_alloca_call_p (stmt)
&& !gimple_call_alloca_for_var_p (stmt)
&& !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
&& !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
{
inline_forbidden_reason
@ -3974,15 +3998,18 @@ estimate_num_insns (gimple stmt, eni_weights *weights)
break;
case GIMPLE_SWITCH:
/* Take into account cost of the switch + guess 2 conditional jumps for
each case label.
{
gswitch *switch_stmt = as_a <gswitch *> (stmt);
/* Take into account cost of the switch + guess 2 conditional jumps for
each case label.
TODO: once the switch expansion logic is sufficiently separated, we can
do a better job of estimating the cost of the switch. */
if (weights->time_based)
cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
else
cost = gimple_switch_num_labels (stmt) * 2;
TODO: once the switch expansion logic is sufficiently separated, we can
do a better job of estimating the cost of the switch. */
if (weights->time_based)
cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
else
cost = gimple_switch_num_labels (switch_stmt) * 2;
}
break;
case GIMPLE_CALL:
@ -4056,7 +4083,7 @@ estimate_num_insns (gimple stmt, eni_weights *weights)
case GIMPLE_ASM:
{
int count = asm_str_count (gimple_asm_string (stmt));
int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
/* 1000 means infinity. This avoids overflows later
with very long asm statements. */
if (count > 1000)
@ -4076,13 +4103,17 @@ estimate_num_insns (gimple stmt, eni_weights *weights)
return 10;
case GIMPLE_BIND:
return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
return estimate_num_insns_seq (
gimple_bind_body (as_a <gbind *> (stmt)),
weights);
case GIMPLE_EH_FILTER:
return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
case GIMPLE_CATCH:
return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
return estimate_num_insns_seq (gimple_catch_handler (
as_a <gcatch *> (stmt)),
weights);
case GIMPLE_TRY:
return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
@ -4121,7 +4152,8 @@ estimate_num_insns (gimple stmt, eni_weights *weights)
case GIMPLE_TRANSACTION:
return (weights->tm_cost
+ estimate_num_insns_seq (gimple_transaction_body (stmt),
+ estimate_num_insns_seq (gimple_transaction_body (
as_a <gtransaction *> (stmt)),
weights));
default:
@ -4258,6 +4290,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
gimple_stmt_iterator gsi, stmt_gsi;
bool successfully_inlined = FALSE;
bool purge_dead_abnormal_edges;
gcall *call_stmt;
unsigned int i;
/* Set input_location here so we get the right instantiation context
@ -4267,7 +4300,8 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
input_location = gimple_location (stmt);
/* From here on, we're only interested in CALL_EXPRs. */
if (gimple_code (stmt) != GIMPLE_CALL)
call_stmt = dyn_cast <gcall *> (stmt);
if (!call_stmt)
goto egress;
cg_edge = id->dst_node->get_edge (stmt);
@ -4413,7 +4447,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
id->src_fn = fn;
id->src_node = cg_edge->callee;
id->src_cfun = DECL_STRUCT_FUNCTION (fn);
id->gimple_call = stmt;
id->call_stmt = stmt;
gcc_assert (!id->src_cfun->after_inlining);
@ -4473,7 +4507,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
if (gimple_call_with_bounds_p (stmt)
&& TREE_CODE (modify_dest) == SSA_NAME)
{
gimple retbnd = chkp_retbnd_call_by_val (modify_dest);
gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
if (retbnd)
{
return_bounds = gimple_call_lhs (retbnd);
@ -4496,7 +4530,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
if (DECL_P (modify_dest))
TREE_NO_WARNING (modify_dest) = 1;
if (gimple_call_return_slot_opt_p (stmt))
if (gimple_call_return_slot_opt_p (call_stmt))
{
return_slot = modify_dest;
modify_dest = NULL;
@ -4969,9 +5003,9 @@ mark_local_labels_stmt (gimple_stmt_iterator *gsip,
struct walk_stmt_info *wi)
{
copy_body_data *id = (copy_body_data *) wi->info;
gimple stmt = gsi_stmt (*gsip);
glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
if (gimple_code (stmt) == GIMPLE_LABEL)
if (stmt)
{
tree decl = gimple_label_label (stmt);
@ -5043,9 +5077,9 @@ replace_locals_stmt (gimple_stmt_iterator *gsip,
struct walk_stmt_info *wi)
{
copy_body_data *id = (copy_body_data *) wi->info;
gimple stmt = gsi_stmt (*gsip);
gimple gs = gsi_stmt (*gsip);
if (gimple_code (stmt) == GIMPLE_BIND)
if (gbind *stmt = dyn_cast <gbind *> (gs))
{
tree block = gimple_bind_block (stmt);

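(The inliner hunks above replace the is_gimple_call test plus untyped accessors with a single dyn_cast <gcall *> whose result then feeds the call-specific API, and copy_body_data's gimple_call field becomes the typed call_stmt. A minimal sketch of the pattern, not part of the merged diff; it assumes the GCC-internal headers, and clear_tail_call_sketch is a made-up name.)

/* Narrow STMT to a gcall * and use the call-specific accessors on the
   typed pointer; non-calls simply fall through.  */
static void
clear_tail_call_sketch (gimple stmt)
{
  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    if (gimple_call_tail_p (call_stmt))
      gimple_call_set_tail (call_stmt, false);
}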

@ -81,7 +81,7 @@ struct copy_body_data
/* GIMPLE_CALL if va arg parameter packs should be expanded or NULL
is not. */
gimple gimple_call;
gimple call_stmt;
/* Exception landing pad the inlined call lies in. */
int eh_lp_nr;
@ -133,7 +133,7 @@ struct copy_body_data
bitmap blocks_to_copy;
/* Debug statements that need processing. */
vec<gimple> debug_stmts;
vec<gdebug *> debug_stmts;
/* A map from local declarations in the inlined function to
equivalents in the function into which it is being inlined, where


@ -128,7 +128,7 @@ static bitmap names_to_release;
/* vec of vec of PHIs to rewrite in a basic block. Element I corresponds
to the basic block with index I. Allocated once per compilation, *not*
released between different functions. */
static vec<gimple_vec> phis_to_rewrite;
static vec< vec<gphi *> > phis_to_rewrite;
/* The bitmap of non-NULL elements of PHIS_TO_REWRITE. */
static bitmap blocks_with_phis_to_rewrite;
@ -960,9 +960,9 @@ find_def_blocks_for (tree var)
/* Marks phi node PHI in basic block BB for rewrite. */
static void
mark_phi_for_rewrite (basic_block bb, gimple phi)
mark_phi_for_rewrite (basic_block bb, gphi *phi)
{
gimple_vec phis;
vec<gphi *> phis;
unsigned n, idx = bb->index;
if (rewrite_uses_p (phi))
@ -1001,7 +1001,7 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
{
unsigned bb_index;
edge e;
gimple phi;
gphi *phi;
basic_block bb;
bitmap_iterator bi;
struct def_blocks_d *def_map = find_def_blocks_for (var);
@ -1405,8 +1405,8 @@ rewrite_add_phi_arguments (basic_block bb)
FOR_EACH_EDGE (e, ei, bb->succs)
{
gimple phi;
gimple_stmt_iterator gsi;
gphi *phi;
gphi_iterator gsi;
for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi);
gsi_next (&gsi))
@ -1414,7 +1414,7 @@ rewrite_add_phi_arguments (basic_block bb)
tree currdef, res;
location_t loc;
phi = gsi_stmt (gsi);
phi = gsi.phi ();
res = gimple_phi_result (phi);
currdef = get_reaching_def (SSA_NAME_VAR (res));
/* Virtual operand PHI args do not need a location. */
@ -1444,8 +1444,6 @@ public:
void
rewrite_dom_walker::before_dom_children (basic_block bb)
{
gimple_stmt_iterator gsi;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\n\nRenaming block #%d\n\n", bb->index);
@ -1455,7 +1453,8 @@ rewrite_dom_walker::before_dom_children (basic_block bb)
/* Step 1. Register new definitions for every PHI node in the block.
Conceptually, all the PHI nodes are executed in parallel and each PHI
node introduces a new version for the associated variable. */
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
tree result = gimple_phi_result (gsi_stmt (gsi));
register_new_def (result, SSA_NAME_VAR (result));
@ -1465,7 +1464,8 @@ rewrite_dom_walker::before_dom_children (basic_block bb)
with its immediate reaching definitions. Update the current definition
of a variable when a new real or virtual definition is found. */
if (bitmap_bit_p (interesting_blocks, bb->index))
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
rewrite_stmt (&gsi);
/* Step 3. Visit all the successor blocks of BB looking for PHI nodes.
@ -2016,8 +2016,8 @@ rewrite_update_phi_arguments (basic_block bb)
FOR_EACH_EDGE (e, ei, bb->succs)
{
gimple phi;
gimple_vec phis;
gphi *phi;
vec<gphi *> phis;
if (!bitmap_bit_p (blocks_with_phis_to_rewrite, e->dest->index))
continue;
@ -2070,12 +2070,13 @@ rewrite_update_phi_arguments (basic_block bb)
else
{
gimple stmt = SSA_NAME_DEF_STMT (reaching_def);
gphi *other_phi = dyn_cast <gphi *> (stmt);
/* Single element PHI nodes behave like copies, so get the
location from the phi argument. */
if (gimple_code (stmt) == GIMPLE_PHI
&& gimple_phi_num_args (stmt) == 1)
locus = gimple_phi_arg_location (stmt, 0);
if (other_phi
&& gimple_phi_num_args (other_phi) == 1)
locus = gimple_phi_arg_location (other_phi, 0);
else
locus = gimple_location (stmt);
}
@ -2108,7 +2109,6 @@ void
rewrite_update_dom_walker::before_dom_children (basic_block bb)
{
bool is_abnormal_phi;
gimple_stmt_iterator gsi;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Registering new PHI nodes in block #%d\n",
@ -2129,10 +2129,11 @@ rewrite_update_dom_walker::before_dom_children (basic_block bb)
register it as a new definition for its corresponding name. Also
register definitions for names whose underlying symbols are
marked for renaming. */
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
tree lhs, lhs_sym;
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
if (!register_defs_p (phi))
continue;
@ -2164,7 +2165,7 @@ rewrite_update_dom_walker::before_dom_children (basic_block bb)
if (bitmap_bit_p (interesting_blocks, bb->index))
{
gcc_checking_assert (bitmap_bit_p (blocks_to_update, bb->index));
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
if (rewrite_update_stmt (gsi_stmt (gsi), gsi))
gsi_remove (&gsi, true);
else
@ -2480,7 +2481,7 @@ mark_use_interesting (tree var, gimple stmt, basic_block bb, bool insert_phi_p)
mark_block_for_update (bb);
if (gimple_code (stmt) == GIMPLE_PHI)
mark_phi_for_rewrite (def_bb, stmt);
mark_phi_for_rewrite (def_bb, as_a <gphi *> (stmt));
else
{
set_rewrite_uses (stmt, true);
@ -2522,7 +2523,6 @@ static void
prepare_block_for_update (basic_block bb, bool insert_phi_p)
{
basic_block son;
gimple_stmt_iterator si;
edge e;
edge_iterator ei;
@ -2530,9 +2530,10 @@ prepare_block_for_update (basic_block bb, bool insert_phi_p)
/* Process PHI nodes marking interesting those that define or use
the symbols that we are interested in. */
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
gsi_next (&si))
{
gimple phi = gsi_stmt (si);
gphi *phi = si.phi ();
tree lhs_sym, lhs = gimple_phi_result (phi);
if (TREE_CODE (lhs) == SSA_NAME
@ -2556,7 +2557,8 @@ prepare_block_for_update (basic_block bb, bool insert_phi_p)
}
/* Process the statements. */
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
gsi_next (&si))
{
gimple stmt;
ssa_op_iter i;
@ -2628,7 +2630,7 @@ prepare_use_sites_for (tree name, bool insert_phi_p)
if (gimple_code (stmt) == GIMPLE_PHI)
{
int ix = PHI_ARG_INDEX_FROM_USE (use_p);
edge e = gimple_phi_arg_edge (stmt, ix);
edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), ix);
mark_use_interesting (name, stmt, e->src, insert_phi_p);
}
else
@ -2840,7 +2842,7 @@ delete_update_ssa (void)
if (blocks_with_phis_to_rewrite)
EXECUTE_IF_SET_IN_BITMAP (blocks_with_phis_to_rewrite, 0, i, bi)
{
gimple_vec phis = phis_to_rewrite[i];
vec<gphi *> phis = phis_to_rewrite[i];
phis.release ();
phis_to_rewrite[i].create (0);
}
@ -2935,7 +2937,7 @@ mark_virtual_operand_for_renaming (tree name)
removed. */
void
mark_virtual_phi_result_for_renaming (gimple phi)
mark_virtual_phi_result_for_renaming (gphi *phi)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{


@ -26,7 +26,7 @@ void delete_update_ssa (void);
tree create_new_def_for (tree, gimple, def_operand_p);
void mark_virtual_operands_for_renaming (struct function *);
void mark_virtual_operand_for_renaming (tree);
void mark_virtual_phi_result_for_renaming (gimple);
void mark_virtual_phi_result_for_renaming (gphi *);
bool need_ssa_update_p (struct function *);
bool name_registered_for_update_p (tree);
void release_ssa_name_after_update_ssa (tree);


@ -405,16 +405,16 @@ stmts_from_loop (struct loop *loop, vec<gimple> *stmts)
for (i = 0; i < loop->num_nodes; i++)
{
basic_block bb = bbs[i];
gimple_stmt_iterator bsi;
gimple stmt;
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
if (!virtual_operand_p (gimple_phi_result (gsi_stmt (bsi))))
stmts->safe_push (gsi_stmt (bsi));
for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
if (!virtual_operand_p (gimple_phi_result (bsi.phi ())))
stmts->safe_push (bsi.phi ());
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
stmt = gsi_stmt (bsi);
gimple stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL && !is_gimple_debug (stmt))
stmts->safe_push (stmt);
}
@ -632,7 +632,6 @@ generate_loops_for_partition (struct loop *loop, partition_t partition,
bool copy_p)
{
unsigned i;
gimple_stmt_iterator bsi;
basic_block *bbs;
if (copy_p)
@ -651,15 +650,16 @@ generate_loops_for_partition (struct loop *loop, partition_t partition,
{
basic_block bb = bbs[i];
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
gsi_next (&bsi))
{
gimple phi = gsi_stmt (bsi);
gphi *phi = bsi.phi ();
if (!virtual_operand_p (gimple_phi_result (phi))
&& !bitmap_bit_p (partition->stmts, gimple_uid (phi)))
reset_debug_uses (phi);
}
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL
@ -673,9 +673,9 @@ generate_loops_for_partition (struct loop *loop, partition_t partition,
{
basic_block bb = bbs[i];
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi);)
for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);)
{
gimple phi = gsi_stmt (bsi);
gphi *phi = bsi.phi ();
if (!virtual_operand_p (gimple_phi_result (phi))
&& !bitmap_bit_p (partition->stmts, gimple_uid (phi)))
remove_phi_node (&bsi, true);
@ -683,7 +683,7 @@ generate_loops_for_partition (struct loop *loop, partition_t partition,
gsi_next (&bsi);
}
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi);)
for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);)
{
gimple stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL
@ -692,15 +692,16 @@ generate_loops_for_partition (struct loop *loop, partition_t partition,
{
/* Choose an arbitrary path through the empty CFG part
that this unnecessary control stmt controls. */
if (gimple_code (stmt) == GIMPLE_COND)
if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
{
gimple_cond_make_false (stmt);
gimple_cond_make_false (cond_stmt);
update_stmt (stmt);
}
else if (gimple_code (stmt) == GIMPLE_SWITCH)
{
gswitch *switch_stmt = as_a <gswitch *> (stmt);
gimple_switch_set_index
(stmt, CASE_LOW (gimple_switch_label (stmt, 1)));
(switch_stmt, CASE_LOW (gimple_switch_label (switch_stmt, 1)));
update_stmt (stmt);
}
else
@ -912,14 +913,15 @@ destroy_loop (struct loop *loop)
Make sure we replace all uses of virtual defs that will remain
outside of the loop with the bare symbol as delete_basic_block
will release them. */
gimple_stmt_iterator gsi;
for (gsi = gsi_start_phis (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gsi = gsi_start_phis (bbs[i]); !gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
mark_virtual_phi_result_for_renaming (phi);
}
for (gsi = gsi_start_bb (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
for (gimple_stmt_iterator gsi = gsi_start_bb (bbs[i]); !gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
tree vdef = gimple_vdef (stmt);
@ -1752,10 +1754,11 @@ pass_loop_distribution::execute (function *fun)
bbs = get_loop_body_in_dom_order (loop);
for (i = 0; i < loop->num_nodes; ++i)
{
gimple_stmt_iterator gsi;
for (gsi = gsi_start_phis (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gsi = gsi_start_phis (bbs[i]);
!gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
continue;
/* Distribute stmts which have defs that are used outside of
@ -1764,7 +1767,9 @@ pass_loop_distribution::execute (function *fun)
continue;
work_list.safe_push (phi);
}
for (gsi = gsi_start_bb (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
for (gimple_stmt_iterator gsi = gsi_start_bb (bbs[i]);
!gsi_end_p (gsi);
gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
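
The tree-loop-distribution.c hunks above replace explicit `gimple_code (stmt) == GIMPLE_COND` / `GIMPLE_SWITCH` checks with the branch's typed-downcast idiom (`dyn_cast <gcond *>`, `as_a <gswitch *>`), so the subclass pointer is available inside the guarded block. Below is a minimal, self-contained analogue of that shape; the mini_* names and dyn_cast_gcond helper are hypothetical stand-ins, not GCC's real is-a.h machinery, which likewise dispatches on the statement code rather than C++ RTTI.

#include <cstdio>

enum mini_gimple_code { MINI_GIMPLE_ASSIGN, MINI_GIMPLE_COND, MINI_GIMPLE_SWITCH };

struct mini_gimple { mini_gimple_code code; };
struct mini_gcond : mini_gimple { const char *pred; };

/* dyn_cast-style helper: return a typed pointer when the statement code
   matches, otherwise NULL, so the result can be tested and named directly
   in an if-condition.  */
static mini_gcond *
dyn_cast_gcond (mini_gimple *stmt)
{
  return stmt->code == MINI_GIMPLE_COND
         ? static_cast<mini_gcond *> (stmt) : nullptr;
}

int
main ()
{
  mini_gcond cond;
  cond.code = MINI_GIMPLE_COND;
  cond.pred = "i < n";
  mini_gimple *stmt = &cond;

  /* Same shape as: if (gcond *cond_stmt = dyn_cast <gcond *> (stmt)) ...  */
  if (mini_gcond *cond_stmt = dyn_cast_gcond (stmt))
    printf ("cond stmt, predicate: %s\n", cond_stmt->pred);
  else
    printf ("not a cond\n");
  return 0;
}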


@ -418,7 +418,7 @@ get_chain_field (struct nesting_info *info)
static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
gimple call)
gcall *call)
{
tree t;
@ -623,7 +623,7 @@ walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
static void
walk_gimple_omp_for (gimple for_stmt,
walk_gimple_omp_for (gomp_for *for_stmt,
walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
struct nesting_info *info)
{
@ -1355,7 +1355,8 @@ convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
case GIMPLE_OMP_FOR:
save_suppress = info->suppress_expansion;
convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
walk_gimple_omp_for (as_a <gomp_for *> (stmt),
convert_nonlocal_reference_stmt,
convert_nonlocal_reference_op, info);
walk_body (convert_nonlocal_reference_stmt,
convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
@ -1401,7 +1402,7 @@ convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TO;
OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
gimple_omp_target_set_clauses (stmt, c);
gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
}
save_local_var_chain = info->new_local_var_chain;
@ -1435,10 +1436,12 @@ convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
break;
case GIMPLE_BIND:
if (!optimize && gimple_bind_block (stmt))
note_nonlocal_block_vlas (info, gimple_bind_block (stmt));
{
gbind *bind_stmt = as_a <gbind *> (stmt);
if (!optimize && gimple_bind_block (bind_stmt))
note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));
for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
if (TREE_CODE (var) == NAMELIST_DECL)
{
/* Adjust decls mentioned in NAMELIST_DECL. */
@ -1459,7 +1462,7 @@ convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
*handled_ops_p = false;
return NULL_TREE;
}
case GIMPLE_COND:
wi->val_only = true;
wi->is_lhs = false;
@ -1921,7 +1924,8 @@ convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
case GIMPLE_OMP_FOR:
save_suppress = info->suppress_expansion;
convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
walk_gimple_omp_for (stmt, convert_local_reference_stmt,
walk_gimple_omp_for (as_a <gomp_for *> (stmt),
convert_local_reference_stmt,
convert_local_reference_op, info);
walk_body (convert_local_reference_stmt, convert_local_reference_op,
info, gimple_omp_body_ptr (stmt));
@ -1964,7 +1968,7 @@ convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TOFROM;
OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
gimple_omp_target_set_clauses (stmt, c);
gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
}
save_local_var_chain = info->new_local_var_chain;
@ -2017,7 +2021,9 @@ convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
return NULL_TREE;
case GIMPLE_BIND:
for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
var;
var = DECL_CHAIN (var))
if (TREE_CODE (var) == NAMELIST_DECL)
{
/* Adjust decls mentioned in NAMELIST_DECL. */
@ -2069,7 +2075,7 @@ convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
{
struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
tree label, new_label, target_context, x, field;
gimple call;
gcall *call;
gimple stmt = gsi_stmt (*gsi);
if (gimple_code (stmt) != GIMPLE_GOTO)
@ -2140,9 +2146,9 @@ convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
struct nesting_info *const info = (struct nesting_info *) wi->info;
tree label, new_label;
gimple_stmt_iterator tmp_gsi;
gimple stmt = gsi_stmt (*gsi);
glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
if (gimple_code (stmt) != GIMPLE_LABEL)
if (!stmt)
{
*handled_ops_p = false;
return NULL_TREE;
@ -2186,7 +2192,7 @@ convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
tree t = *tp, decl, target_context, x, builtin;
gimple call;
gcall *call;
*walk_subtrees = 0;
switch (TREE_CODE (t))
@ -2335,8 +2341,9 @@ convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
target_context = decl_function_context (decl);
if (target_context && DECL_STATIC_CHAIN (decl))
{
gimple_call_set_chain (stmt, get_static_chain (info, target_context,
&wi->gsi));
gimple_call_set_chain (as_a <gcall *> (stmt),
get_static_chain (info, target_context,
&wi->gsi));
info->static_chain_added |= (1 << (info->context != target_context));
}
break;
@ -2403,7 +2410,8 @@ convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
= i ? OMP_CLAUSE_MAP_TO : OMP_CLAUSE_MAP_TOFROM;
OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
gimple_omp_target_set_clauses (stmt, c);
gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
c);
}
}
info->static_chain_added |= save_static_chain_added;
@ -2783,9 +2791,9 @@ finalize_nesting_tree_1 (struct nesting_info *root)
/* If we created initialization statements, insert them. */
if (stmt_list)
{
gimple bind;
gbind *bind;
annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
bind = gimple_seq_first_stmt (gimple_body (context));
bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
gimple_bind_set_body (bind, stmt_list);
}
@ -2814,7 +2822,7 @@ finalize_nesting_tree_1 (struct nesting_info *root)
if (root->debug_var_chain)
{
tree debug_var;
gimple scope;
gbind *scope;
remap_vla_decls (DECL_INITIAL (root->context), root);
@ -2869,7 +2877,7 @@ finalize_nesting_tree_1 (struct nesting_info *root)
delete id.cb.decl_map;
}
scope = gimple_seq_first_stmt (gimple_body (root->context));
scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
if (gimple_bind_block (scope))
declare_vars (root->debug_var_chain, scope, true);
else


@ -189,12 +189,12 @@ pass_nrv::execute (function *fun)
gimple stmt = gsi_stmt (gsi);
tree ret_val;
if (gimple_code (stmt) == GIMPLE_RETURN)
if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
/* In a function with an aggregate return value, the
gimplifier has changed all non-empty RETURN_EXPRs to
return the RESULT_DECL. */
ret_val = gimple_return_retval (stmt);
ret_val = gimple_return_retval (return_stmt);
if (ret_val)
gcc_assert (ret_val == result);
}
@ -324,7 +324,7 @@ make_pass_nrv (gcc::context *ctxt)
DEST is available if it is not clobbered or used by the call. */
static bool
dest_safe_for_nrv_p (gimple call)
dest_safe_for_nrv_p (gcall *call)
{
tree dest = gimple_call_lhs (call);
@ -391,10 +391,11 @@ pass_return_slot::execute (function *fun)
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
gcall *stmt;
bool slot_opt_p;
if (is_gimple_call (stmt)
stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
if (stmt
&& gimple_call_lhs (stmt)
&& !gimple_call_return_slot_opt_p (stmt)
&& aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),


@ -69,8 +69,8 @@ static const unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
static tree pass_through_call (const_gimple);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
@ -402,7 +402,7 @@ addr_object_size (struct object_size_info *osi, const_tree ptr,
unknown[object_size_type]. */
static unsigned HOST_WIDE_INT
alloc_object_size (const_gimple call, int object_size_type)
alloc_object_size (const gcall *call, int object_size_type)
{
tree callee, bytes = NULL_TREE;
tree alloc_size;
@ -465,7 +465,7 @@ alloc_object_size (const_gimple call, int object_size_type)
Otherwise return NULL. */
static tree
pass_through_call (const_gimple call)
pass_through_call (const gcall *call)
{
tree callee = gimple_call_fndecl (call);
@ -679,7 +679,7 @@ expr_object_size (struct object_size_info *osi, tree ptr, tree value)
/* Compute object_sizes for PTR, defined to the result of a call. */
static void
call_object_size (struct object_size_info *osi, tree ptr, gimple call)
call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
int object_size_type = osi->object_size_type;
unsigned int varno = SSA_NAME_VERSION (ptr);
@ -976,7 +976,8 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
case GIMPLE_CALL:
{
tree arg = pass_through_call (stmt);
gcall *call_stmt = as_a <gcall *> (stmt);
tree arg = pass_through_call (call_stmt);
if (arg)
{
if (TREE_CODE (arg) == SSA_NAME
@ -986,7 +987,7 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
expr_object_size (osi, var, arg);
}
else
call_object_size (osi, var, stmt);
call_object_size (osi, var, call_stmt);
break;
}
@ -1112,7 +1113,8 @@ check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
case GIMPLE_CALL:
{
tree arg = pass_through_call (stmt);
gcall *call_stmt = as_a <gcall *> (stmt);
tree arg = pass_through_call (call_stmt);
if (arg)
{
if (TREE_CODE (arg) == SSA_NAME)
@ -1260,7 +1262,7 @@ pass_object_sizes::execute (function *fun)
continue;
init_object_sizes ();
result = fold_call_stmt (call, false);
result = fold_call_stmt (as_a <gcall *> (call), false);
if (!result)
{
if (gimple_call_num_args (call) == 2
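
In the tree-object-size.c hunks above, helpers such as alloc_object_size and pass_through_call now take `const gcall *` instead of `const_gimple`, and the single checked cast (`as_a <gcall *>`) moves to the caller in collect_object_sizes_for. A rough sketch of why that narrowing pays off, using hypothetical mini_* types rather than GCC's classes:

#include <cstdio>

enum mini_code { MINI_ASSIGN, MINI_CALL };

struct mini_stmt { mini_code code; };
struct mini_call : mini_stmt { unsigned nargs; };

/* Before: took mini_stmt * and had to trust or re-check the code itself.
   After: the signature documents and enforces the precondition.  */
static unsigned
call_num_args (const mini_call *call)
{
  return call->nargs;
}

int
main ()
{
  mini_call c;
  c.code = MINI_CALL;
  c.nargs = 2;
  mini_stmt *stmt = &c;

  /* The caller performs the one checked downcast, as the hunks above do
     with as_a <gcall *> (stmt).  */
  if (stmt->code == MINI_CALL)
    printf ("%u args\n", call_num_args (static_cast<const mini_call *> (stmt)));
  return 0;
}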


@ -593,13 +593,13 @@ eliminate_build (elim_graph g)
{
tree Ti;
int p0, pi;
gimple_stmt_iterator gsi;
gphi_iterator gsi;
clear_elim_graph (g);
for (gsi = gsi_start_phis (g->e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
source_location locus;
p0 = var_to_partition (g->map, gimple_phi_result (phi));
@ -800,7 +800,7 @@ eliminate_phi (edge e, elim_graph g)
check to see if this allows another PHI node to be removed. */
static void
remove_gimple_phi_args (gimple phi)
remove_gimple_phi_args (gphi *phi)
{
use_operand_p arg_p;
ssa_op_iter iter;
@ -828,7 +828,7 @@ remove_gimple_phi_args (gimple phi)
/* Also remove the def if it is a PHI node. */
if (gimple_code (stmt) == GIMPLE_PHI)
{
remove_gimple_phi_args (stmt);
remove_gimple_phi_args (as_a <gphi *> (stmt));
gsi = gsi_for_stmt (stmt);
remove_phi_node (&gsi, true);
}
@ -844,14 +844,14 @@ static void
eliminate_useless_phis (void)
{
basic_block bb;
gimple_stmt_iterator gsi;
gphi_iterator gsi;
tree result;
FOR_EACH_BB_FN (bb, cfun)
{
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
result = gimple_phi_result (phi);
if (virtual_operand_p (result))
{
@ -907,10 +907,10 @@ rewrite_trees (var_map map ATTRIBUTE_UNUSED)
create incorrect code. */
FOR_EACH_BB_FN (bb, cfun)
{
gimple_stmt_iterator gsi;
gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi));
if (T0 == NULL_TREE)
{
@ -1109,7 +1109,7 @@ static void
insert_backedge_copies (void)
{
basic_block bb;
gimple_stmt_iterator gsi;
gphi_iterator gsi;
mark_dfs_back_edges ();
@ -1120,7 +1120,7 @@ insert_backedge_copies (void)
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
tree result = gimple_phi_result (phi);
size_t i;
@ -1142,7 +1142,8 @@ insert_backedge_copies (void)
|| trivially_conflicts_p (bb, result, arg)))
{
tree name;
gimple stmt, last = NULL;
gassign *stmt;
gimple last = NULL;
gimple_stmt_iterator gsi2;
gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
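
The tree-outof-ssa.c loops above switch from `gimple_stmt_iterator` plus `gsi_stmt ()` to `gphi_iterator` plus `.phi ()`, which hands back a `gphi *` directly instead of an untyped statement. The sketch below is a toy analogue of that iterator-subclass idea; the mini_* types and start_phis helper are hypothetical, while GCC's real gphi_iterator lives in gimple-iterator.h.

#include <cstdio>
#include <cstddef>
#include <vector>

struct mini_stmt { int uid; };
struct mini_phi : mini_stmt { unsigned nargs; };

struct mini_stmt_iterator
{
  std::vector<mini_stmt *> *seq;
  size_t idx;
  bool end_p () const { return idx >= seq->size (); }
  void next () { ++idx; }
  mini_stmt *stmt () const { return (*seq)[idx]; }
};

/* Analogue of gphi_iterator: the same walk, but the accessor returns the
   subclass pointer, because this iterator is only ever started on a PHI
   sequence.  */
struct mini_phi_iterator : mini_stmt_iterator
{
  mini_phi *phi () const { return static_cast<mini_phi *> (stmt ()); }
};

static mini_phi_iterator
start_phis (std::vector<mini_stmt *> *phis)
{
  mini_phi_iterator gsi;
  gsi.seq = phis;
  gsi.idx = 0;
  return gsi;
}

int
main ()
{
  mini_phi a; a.uid = 1; a.nargs = 2;
  mini_phi b; b.uid = 2; b.nargs = 3;
  std::vector<mini_stmt *> phis = { &a, &b };

  /* Same shape as: for (gphi_iterator gsi = gsi_start_phis (bb); ...)  */
  for (mini_phi_iterator gsi = start_phis (&phis); !gsi.end_p (); gsi.next ())
    printf ("phi %d has %u args\n", gsi.phi ()->uid, gsi.phi ()->nargs);
  return 0;
}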


@ -203,12 +203,12 @@ struct reduction_info
enum tree_code reduction_code;/* code for the reduction operation. */
unsigned reduc_version; /* SSA_NAME_VERSION of original reduc_phi
result. */
gimple keep_res; /* The PHI_RESULT of this phi is the resulting value
gphi *keep_res; /* The PHI_RESULT of this phi is the resulting value
of the reduction variable when exiting the loop. */
tree initial_value; /* The initial value of the reduction var before entering the loop. */
tree field; /* the name of the field in the parloop data structure intended for reduction. */
tree init; /* reduction initialization value. */
gimple new_phi; /* (helper field) Newly created phi node whose result
gphi *new_phi; /* (helper field) Newly created phi node whose result
will be passed to the atomic operation. Represents
the local result each thread computed for the reduction
operation. */
@ -501,7 +501,7 @@ take_address_of (tree obj, tree type, edge entry,
{
int uid;
tree *var_p, name, addr;
gimple stmt;
gassign *stmt;
gimple_seq stmts;
/* Since the address of OBJ is invariant, the trees may be shared.
@ -1038,7 +1038,7 @@ create_phi_for_local_result (reduction_info **slot, struct loop *loop)
{
struct reduction_info *const reduc = *slot;
edge e;
gimple new_phi;
gphi *new_phi;
basic_block store_bb;
tree local_res;
source_location locus;
@ -1509,23 +1509,26 @@ transform_to_exit_first_loop (struct loop *loop,
bool ok;
edge exit = single_dom_exit (loop), hpred;
tree control, control_name, res, t;
gimple phi, nphi, cond_stmt, stmt, cond_nit;
gimple_stmt_iterator gsi;
gphi *phi, *nphi;
gassign *stmt;
gcond *cond_stmt, *cond_nit;
tree nit_1;
split_block_after_labels (loop->header);
orig_header = single_succ (loop->header);
hpred = single_succ_edge (loop->header);
cond_stmt = last_stmt (exit->src);
cond_stmt = as_a <gcond *> (last_stmt (exit->src));
control = gimple_cond_lhs (cond_stmt);
gcc_assert (gimple_cond_rhs (cond_stmt) == nit);
/* Make sure that we have phi nodes on exit for all loop header phis
(create_parallel_loop requires that). */
for (gsi = gsi_start_phis (loop->header); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gsi = gsi_start_phis (loop->header);
!gsi_end_p (gsi);
gsi_next (&gsi))
{
phi = gsi_stmt (gsi);
phi = gsi.phi ();
res = PHI_RESULT (phi);
t = copy_ssa_name (res, phi);
SET_PHI_RESULT (phi, t);
@ -1556,9 +1559,10 @@ transform_to_exit_first_loop (struct loop *loop,
out of the loop is the control variable. */
exit = single_dom_exit (loop);
control_name = NULL_TREE;
for (gsi = gsi_start_phis (ex_bb); !gsi_end_p (gsi); )
for (gphi_iterator gsi = gsi_start_phis (ex_bb);
!gsi_end_p (gsi); )
{
phi = gsi_stmt (gsi);
phi = gsi.phi ();
res = PHI_RESULT (phi);
if (virtual_operand_p (res))
{
@ -1593,8 +1597,8 @@ transform_to_exit_first_loop (struct loop *loop,
/* Initialize the control variable to number of iterations
according to the rhs of the exit condition. */
gsi = gsi_after_labels (ex_bb);
cond_nit = last_stmt (exit->src);
gimple_stmt_iterator gsi = gsi_after_labels (ex_bb);
cond_nit = as_a <gcond *> (last_stmt (exit->src));
nit_1 = gimple_cond_rhs (cond_nit);
nit_1 = force_gimple_operand_gsi (&gsi,
fold_convert (TREE_TYPE (control_name), nit_1),
@ -1616,7 +1620,12 @@ create_parallel_loop (struct loop *loop, tree loop_fn, tree data,
gimple_stmt_iterator gsi;
basic_block bb, paral_bb, for_bb, ex_bb;
tree t, param;
gimple stmt, for_stmt, phi, cond_stmt;
gomp_parallel *omp_par_stmt;
gimple omp_return_stmt1, omp_return_stmt2;
gimple phi;
gcond *cond_stmt;
gomp_for *for_stmt;
gomp_continue *omp_cont_stmt;
tree cvar, cvar_init, initvar, cvar_next, cvar_base, type;
edge exit, nexit, guard, end, e;
@ -1628,35 +1637,37 @@ create_parallel_loop (struct loop *loop, tree loop_fn, tree data,
t = build_omp_clause (loc, OMP_CLAUSE_NUM_THREADS);
OMP_CLAUSE_NUM_THREADS_EXPR (t)
= build_int_cst (integer_type_node, n_threads);
stmt = gimple_build_omp_parallel (NULL, t, loop_fn, data);
gimple_set_location (stmt, loc);
omp_par_stmt = gimple_build_omp_parallel (NULL, t, loop_fn, data);
gimple_set_location (omp_par_stmt, loc);
gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
gsi_insert_after (&gsi, omp_par_stmt, GSI_NEW_STMT);
/* Initialize NEW_DATA. */
if (data)
{
gassign *assign_stmt;
gsi = gsi_after_labels (bb);
param = make_ssa_name (DECL_ARGUMENTS (loop_fn), NULL);
stmt = gimple_build_assign (param, build_fold_addr_expr (data));
gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
assign_stmt = gimple_build_assign (param, build_fold_addr_expr (data));
gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
stmt = gimple_build_assign (new_data,
assign_stmt = gimple_build_assign (new_data,
fold_convert (TREE_TYPE (new_data), param));
gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
}
/* Emit GIMPLE_OMP_RETURN for GIMPLE_OMP_PARALLEL. */
bb = split_loop_exit_edge (single_dom_exit (loop));
gsi = gsi_last_bb (bb);
stmt = gimple_build_omp_return (false);
gimple_set_location (stmt, loc);
gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
omp_return_stmt1 = gimple_build_omp_return (false);
gimple_set_location (omp_return_stmt1, loc);
gsi_insert_after (&gsi, omp_return_stmt1, GSI_NEW_STMT);
/* Extract data for GIMPLE_OMP_FOR. */
gcc_assert (loop->header == single_dom_exit (loop)->src);
cond_stmt = last_stmt (loop->header);
cond_stmt = as_a <gcond *> (last_stmt (loop->header));
cvar = gimple_cond_lhs (cond_stmt);
cvar_base = SSA_NAME_VAR (cvar);
@ -1680,12 +1691,16 @@ create_parallel_loop (struct loop *loop, tree loop_fn, tree data,
guard = make_edge (for_bb, ex_bb, 0);
single_succ_edge (loop->latch)->flags = 0;
end = make_edge (loop->latch, ex_bb, EDGE_FALLTHRU);
for (gsi = gsi_start_phis (ex_bb); !gsi_end_p (gsi); gsi_next (&gsi))
for (gphi_iterator gpi = gsi_start_phis (ex_bb);
!gsi_end_p (gpi); gsi_next (&gpi))
{
source_location locus;
tree def;
phi = gsi_stmt (gsi);
stmt = SSA_NAME_DEF_STMT (PHI_ARG_DEF_FROM_EDGE (phi, exit));
gphi *phi = gpi.phi ();
gphi *stmt;
stmt = as_a <gphi *> (
SSA_NAME_DEF_STMT (PHI_ARG_DEF_FROM_EDGE (phi, exit)));
def = PHI_ARG_DEF_FROM_EDGE (stmt, loop_preheader_edge (loop));
locus = gimple_phi_arg_location_from_edge (stmt,
@ -1721,16 +1736,16 @@ create_parallel_loop (struct loop *loop, tree loop_fn, tree data,
/* Emit GIMPLE_OMP_CONTINUE. */
gsi = gsi_last_bb (loop->latch);
stmt = gimple_build_omp_continue (cvar_next, cvar);
gimple_set_location (stmt, loc);
gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
SSA_NAME_DEF_STMT (cvar_next) = stmt;
omp_cont_stmt = gimple_build_omp_continue (cvar_next, cvar);
gimple_set_location (omp_cont_stmt, loc);
gsi_insert_after (&gsi, omp_cont_stmt, GSI_NEW_STMT);
SSA_NAME_DEF_STMT (cvar_next) = omp_cont_stmt;
/* Emit GIMPLE_OMP_RETURN for GIMPLE_OMP_FOR. */
gsi = gsi_last_bb (ex_bb);
stmt = gimple_build_omp_return (true);
gimple_set_location (stmt, loc);
gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
omp_return_stmt2 = gimple_build_omp_return (true);
gimple_set_location (omp_return_stmt2, loc);
gsi_insert_after (&gsi, omp_return_stmt2, GSI_NEW_STMT);
/* After the above dom info is hosed. Re-compute it. */
free_dominance_info (CDI_DOMINATORS);
@ -1914,12 +1929,12 @@ loop_has_vector_phi_nodes (struct loop *loop ATTRIBUTE_UNUSED)
{
unsigned i;
basic_block *bbs = get_loop_body_in_dom_order (loop);
gimple_stmt_iterator gsi;
gphi_iterator gsi;
bool res = true;
for (i = 0; i < loop->num_nodes; i++)
for (gsi = gsi_start_phis (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
if (TREE_CODE (TREE_TYPE (PHI_RESULT (gsi_stmt (gsi)))) == VECTOR_TYPE)
if (TREE_CODE (TREE_TYPE (PHI_RESULT (gsi.phi ()))) == VECTOR_TYPE)
goto end;
res = false;
@ -1933,7 +1948,7 @@ loop_has_vector_phi_nodes (struct loop *loop ATTRIBUTE_UNUSED)
static void
build_new_reduction (reduction_info_table_type *reduction_list,
gimple reduc_stmt, gimple phi)
gimple reduc_stmt, gphi *phi)
{
reduction_info **slot;
struct reduction_info *new_reduction;
@ -1973,14 +1988,14 @@ set_reduc_phi_uids (reduction_info **slot, void *data ATTRIBUTE_UNUSED)
static void
gather_scalar_reductions (loop_p loop, reduction_info_table_type *reduction_list)
{
gimple_stmt_iterator gsi;
gphi_iterator gsi;
loop_vec_info simple_loop_info;
simple_loop_info = vect_analyze_loop_form (loop);
for (gsi = gsi_start_phis (loop->header); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
affine_iv iv;
tree res = PHI_RESULT (phi);
bool double_reduc;
@ -2036,7 +2051,7 @@ try_create_reduction_list (loop_p loop,
reduction_info_table_type *reduction_list)
{
edge exit = single_dom_exit (loop);
gimple_stmt_iterator gsi;
gphi_iterator gsi;
gcc_assert (exit);
@ -2045,7 +2060,7 @@ try_create_reduction_list (loop_p loop,
for (gsi = gsi_start_phis (exit->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
struct reduction_info *red;
imm_use_iterator imm_iter;
use_operand_p use_p;
@ -2103,7 +2118,7 @@ try_create_reduction_list (loop_p loop,
iteration space can be distributed efficiently. */
for (gsi = gsi_start_phis (loop->header); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
gphi *phi = gsi.phi ();
tree def = PHI_RESULT (phi);
affine_iv iv;


@ -105,12 +105,12 @@ phinodes_print_statistics (void)
happens to contain a PHI node with LEN arguments or more, return
that one. */
static inline gimple_statement_phi *
static inline gphi *
allocate_phi_node (size_t len)
{
gimple_statement_phi *phi;
gphi *phi;
size_t bucket = NUM_BUCKETS - 2;
size_t size = sizeof (struct gimple_statement_phi)
size_t size = sizeof (struct gphi)
+ (len - 1) * sizeof (struct phi_arg_d);
if (free_phinode_count)
@ -123,7 +123,7 @@ allocate_phi_node (size_t len)
&& gimple_phi_capacity ((*free_phinodes[bucket])[0]) >= len)
{
free_phinode_count--;
phi = as_a <gimple_statement_phi *> (free_phinodes[bucket]->pop ());
phi = as_a <gphi *> (free_phinodes[bucket]->pop ());
if (free_phinodes[bucket]->is_empty ())
vec_free (free_phinodes[bucket]);
if (GATHER_STATISTICS)
@ -131,8 +131,7 @@ allocate_phi_node (size_t len)
}
else
{
phi = static_cast <gimple_statement_phi *> (
ggc_internal_alloc (size));
phi = static_cast <gphi *> (ggc_internal_alloc (size));
if (GATHER_STATISTICS)
{
enum gimple_alloc_kind kind = gimple_alloc_kind (GIMPLE_PHI);
@ -166,7 +165,7 @@ ideal_phi_node_len (int len)
len = 2;
/* Compute the number of bytes of the original request. */
size = sizeof (struct gimple_statement_phi)
size = sizeof (struct gphi)
+ (len - 1) * sizeof (struct phi_arg_d);
/* Round it up to the next power of two. */
@ -181,10 +180,10 @@ ideal_phi_node_len (int len)
/* Return a PHI node with LEN argument slots for variable VAR. */
static gimple
static gphi *
make_phi_node (tree var, int len)
{
gimple_statement_phi *phi;
gphi *phi;
int capacity, i;
capacity = ideal_phi_node_len (len);
@ -194,7 +193,7 @@ make_phi_node (tree var, int len)
/* We need to clear the entire PHI node, including the argument
portion, because we represent a "missing PHI argument" by placing
NULL_TREE in PHI_ARG_DEF. */
memset (phi, 0, (sizeof (struct gimple_statement_phi)
memset (phi, 0, (sizeof (struct gphi)
- sizeof (struct phi_arg_d)
+ sizeof (struct phi_arg_d) * len));
phi->code = GIMPLE_PHI;
@ -249,18 +248,18 @@ release_phi_node (gimple phi)
/* Resize an existing PHI node. The only way is up. Return the
possibly relocated phi. */
static gimple_statement_phi *
resize_phi_node (gimple_statement_phi *phi, size_t len)
static gphi *
resize_phi_node (gphi *phi, size_t len)
{
size_t old_size, i;
gimple_statement_phi *new_phi;
gphi *new_phi;
gcc_assert (len > gimple_phi_capacity (phi));
/* The garbage collector will not look at the PHI node beyond the
first PHI_NUM_ARGS elements. Therefore, all we have to copy is a
portion of the PHI node currently in use. */
old_size = sizeof (struct gimple_statement_phi)
old_size = sizeof (struct gphi)
+ (gimple_phi_num_args (phi) - 1) * sizeof (struct phi_arg_d);
new_phi = allocate_phi_node (len);
@ -300,16 +299,15 @@ reserve_phi_args_for_new_edge (basic_block bb)
{
size_t len = EDGE_COUNT (bb->preds);
size_t cap = ideal_phi_node_len (len + 4);
gimple_stmt_iterator gsi;
gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple_statement_phi *stmt =
as_a <gimple_statement_phi *> (gsi_stmt (gsi));
gphi *stmt = gsi.phi ();
if (len > gimple_phi_capacity (stmt))
{
gimple_statement_phi *new_phi = resize_phi_node (stmt, cap);
gphi *new_phi = resize_phi_node (stmt, cap);
/* The result of the PHI is defined by this PHI node. */
SSA_NAME_DEF_STMT (gimple_phi_result (new_phi)) = new_phi;
@ -336,7 +334,7 @@ reserve_phi_args_for_new_edge (basic_block bb)
/* Adds PHI to BB. */
void
add_phi_node_to_bb (gimple phi, basic_block bb)
add_phi_node_to_bb (gphi *phi, basic_block bb)
{
gimple_seq seq = phi_nodes (bb);
/* Add the new PHI node to the list of PHI nodes for block BB. */
@ -355,10 +353,10 @@ add_phi_node_to_bb (gimple phi, basic_block bb)
/* Create a new PHI node for variable VAR at basic block BB. */
gimple
gphi *
create_phi_node (tree var, basic_block bb)
{
gimple phi = make_phi_node (var, EDGE_COUNT (bb->preds));
gphi *phi = make_phi_node (var, EDGE_COUNT (bb->preds));
add_phi_node_to_bb (phi, bb);
return phi;
@ -372,7 +370,7 @@ create_phi_node (tree var, basic_block bb)
PHI points to the reallocated phi node when we return. */
void
add_phi_arg (gimple phi, tree def, edge e, source_location locus)
add_phi_arg (gphi *phi, tree def, edge e, source_location locus)
{
basic_block bb = e->dest;
@ -405,7 +403,7 @@ add_phi_arg (gimple phi, tree def, edge e, source_location locus)
is consistent with how we remove an edge from the edge vector. */
static void
remove_phi_arg_num (gimple_statement_phi *phi, int i)
remove_phi_arg_num (gphi *phi, int i)
{
int num_elem = gimple_phi_num_args (phi);
@ -441,10 +439,10 @@ remove_phi_arg_num (gimple_statement_phi *phi, int i)
void
remove_phi_args (edge e)
{
gimple_stmt_iterator gsi;
gphi_iterator gsi;
for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
remove_phi_arg_num (as_a <gimple_statement_phi *> (gsi_stmt (gsi)),
remove_phi_arg_num (gsi.phi (),
e->dest_idx);
}
@ -476,7 +474,7 @@ remove_phi_node (gimple_stmt_iterator *gsi, bool release_lhs_p)
void
remove_phi_nodes (basic_block bb)
{
gimple_stmt_iterator gsi;
gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
remove_phi_node (&gsi, true);
@ -488,7 +486,7 @@ remove_phi_nodes (basic_block bb)
NULL. */
tree
degenerate_phi_result (gimple phi)
degenerate_phi_result (gphi *phi)
{
tree lhs = gimple_phi_result (phi);
tree val = NULL;
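
allocate_phi_node, make_phi_node and resize_phi_node above size the node as `sizeof (struct gphi) + (len - 1) * sizeof (struct phi_arg_d)`, i.e. the argument array trails the node in one allocation. A minimal sketch of that pattern, assuming a hypothetical mini_phi type and plain malloc in place of GCC's ggc allocator:

#include <cstdlib>
#include <cstring>
#include <cstdio>

struct mini_phi
{
  unsigned capacity;   /* number of argument slots allocated */
  unsigned nargs;      /* number of slots currently in use */
  int args[1];         /* really 'capacity' elements, allocated below */
};

static mini_phi *
make_mini_phi (unsigned capacity)
{
  /* Over-allocate so args[] has 'capacity' usable slots, mirroring the
     sizeof (struct gphi) + (len - 1) * sizeof (struct phi_arg_d) formula.  */
  size_t size = sizeof (mini_phi) + (capacity - 1) * sizeof (int);
  mini_phi *phi = static_cast<mini_phi *> (malloc (size));
  memset (phi, 0, size);
  phi->capacity = capacity;
  return phi;
}

int
main ()
{
  mini_phi *phi = make_mini_phi (4);
  phi->args[phi->nargs++] = 42;
  printf ("capacity %u, in use %u, arg0 %d\n",
          phi->capacity, phi->nargs, phi->args[0]);
  free (phi);
  return 0;
}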


@ -23,13 +23,13 @@ along with GCC; see the file COPYING3. If not see
extern void phinodes_print_statistics (void);
extern void release_phi_node (gimple);
extern void reserve_phi_args_for_new_edge (basic_block);
extern void add_phi_node_to_bb (gimple phi, basic_block bb);
extern gimple create_phi_node (tree, basic_block);
extern void add_phi_arg (gimple, tree, edge, source_location);
extern void add_phi_node_to_bb (gphi *phi, basic_block bb);
extern gphi *create_phi_node (tree, basic_block);
extern void add_phi_arg (gphi *, tree, edge, source_location);
extern void remove_phi_args (edge);
extern void remove_phi_node (gimple_stmt_iterator *, bool);
extern void remove_phi_nodes (basic_block);
extern tree degenerate_phi_result (gimple);
extern tree degenerate_phi_result (gphi *);
extern void set_phi_nodes (basic_block, gimple_seq);
static inline use_operand_p


@ -1122,14 +1122,15 @@ valid_initializer_p (struct data_reference *ref,
iteration), returns the phi node. Otherwise, NULL_TREE is returned. ROOT
is the root of the current chain. */
static gimple
static gphi *
find_looparound_phi (struct loop *loop, dref ref, dref root)
{
tree name, init, init_ref;
gimple phi = NULL, init_stmt;
gphi *phi = NULL;
gimple init_stmt;
edge latch = loop_latch_edge (loop);
struct data_reference init_dr;
gimple_stmt_iterator psi;
gphi_iterator psi;
if (is_gimple_assign (ref->stmt))
{
@ -1145,7 +1146,7 @@ find_looparound_phi (struct loop *loop, dref ref, dref root)
for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
{
phi = gsi_stmt (psi);
phi = psi.phi ();
if (PHI_ARG_DEF_FROM_EDGE (phi, latch) == name)
break;
}
@ -1183,7 +1184,7 @@ find_looparound_phi (struct loop *loop, dref ref, dref root)
/* Adds a reference for the looparound copy of REF in PHI to CHAIN. */
static void
insert_looparound_copy (chain_p chain, dref ref, gimple phi)
insert_looparound_copy (chain_p chain, dref ref, gphi *phi)
{
dref nw = XCNEW (struct dref_d), aref;
unsigned i;
@ -1214,7 +1215,7 @@ add_looparound_copies (struct loop *loop, chain_p chain)
{
unsigned i;
dref ref, root = get_chain_root (chain);
gimple phi;
gphi *phi;
FOR_EACH_VEC_ELT (chain->refs, i, ref)
{
@ -1302,7 +1303,7 @@ static void
replace_ref_with (gimple stmt, tree new_tree, bool set, bool in_lhs)
{
tree val;
gimple new_stmt;
gassign *new_stmt;
gimple_stmt_iterator bsi, psi;
if (gimple_code (stmt) == GIMPLE_PHI)
@ -1466,7 +1467,7 @@ initialize_root_vars (struct loop *loop, chain_p chain, bitmap tmp_vars)
dref root = get_chain_root (chain);
bool reuse_first = !chain->has_max_use_after;
tree ref, init, var, next;
gimple phi;
gphi *phi;
gimple_seq stmts;
edge entry = loop_preheader_edge (loop), latch = loop_latch_edge (loop);
@ -1540,7 +1541,7 @@ initialize_root_vars_lm (struct loop *loop, dref root, bool written,
unsigned i;
tree ref = DR_REF (root->ref), init, var, next;
gimple_seq stmts;
gimple phi;
gphi *phi;
edge entry = loop_preheader_edge (loop), latch = loop_latch_edge (loop);
/* Find the initializer for the variable, and check that it cannot
@ -1571,7 +1572,7 @@ initialize_root_vars_lm (struct loop *loop, dref root, bool written,
}
else
{
gimple init_stmt = gimple_build_assign (var, init);
gassign *init_stmt = gimple_build_assign (var, init);
gsi_insert_on_edge_immediate (entry, init_stmt);
}
}
@ -1897,14 +1898,15 @@ static void
eliminate_temp_copies (struct loop *loop, bitmap tmp_vars)
{
edge e;
gimple phi, stmt;
gphi *phi;
gimple stmt;
tree name, use, var;
gimple_stmt_iterator psi;
gphi_iterator psi;
e = loop_latch_edge (loop);
for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
{
phi = gsi_stmt (psi);
phi = psi.phi ();
name = PHI_RESULT (phi);
var = SSA_NAME_VAR (name);
if (!var || !bitmap_bit_p (tmp_vars, DECL_UID (var)))
@ -2137,7 +2139,7 @@ static gimple
reassociate_to_the_same_stmt (tree name1, tree name2)
{
gimple stmt1, stmt2, root1, root2, s1, s2;
gimple new_stmt, tmp_stmt;
gassign *new_stmt, *tmp_stmt;
tree new_name, tmp_name, var, r1, r2;
unsigned dist1, dist2;
enum tree_code code;


@ -306,7 +306,7 @@ void
gimple_gen_edge_profiler (int edgeno, edge e)
{
tree ref, one, gcov_type_tmp_var;
gimple stmt1, stmt2, stmt3;
gassign *stmt1, *stmt2, *stmt3;
ref = tree_coverage_counter_ref (GCOV_COUNTER_ARCS, edgeno);
one = build_int_cst (gcov_type_node, 1);
@ -347,7 +347,7 @@ gimple_gen_interval_profiler (histogram_value value, unsigned tag, unsigned base
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref = tree_coverage_counter_ref (tag, base), ref_ptr;
gimple call;
gcall *call;
tree val;
tree start = build_int_cst_type (integer_type_node,
value->hdata.intvl.int_start);
@ -373,7 +373,7 @@ gimple_gen_pow2_profiler (histogram_value value, unsigned tag, unsigned base)
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gimple call;
gcall *call;
tree val;
ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
@ -393,7 +393,7 @@ gimple_gen_one_value_profiler (histogram_value value, unsigned tag, unsigned bas
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gimple call;
gcall *call;
tree val;
ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
@ -414,7 +414,7 @@ void
gimple_gen_ic_profiler (histogram_value value, unsigned tag, unsigned base)
{
tree tmp1;
gimple stmt1, stmt2, stmt3;
gassign *stmt1, *stmt2, *stmt3;
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
@ -456,7 +456,8 @@ gimple_gen_ic_func_profiler (void)
{
struct cgraph_node * c_node = cgraph_node::get (current_function_decl);
gimple_stmt_iterator gsi;
gimple stmt1, stmt2;
gcall *stmt1;
gassign *stmt2;
tree tree_uid, cur_func, void0;
if (c_node->only_called_directly_p ())
@ -516,7 +517,7 @@ gimple_gen_time_profiler (unsigned tag, unsigned base,
gimple_stmt_iterator &gsi)
{
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gimple call;
gcall *call;
ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
true, NULL_TREE, true, GSI_SAME_STMT);
@ -551,7 +552,7 @@ gimple_gen_average_profiler (histogram_value value, unsigned tag, unsigned base)
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gimple call;
gcall *call;
tree val;
ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
@ -572,7 +573,7 @@ gimple_gen_ior_profiler (histogram_value value, unsigned tag, unsigned base)
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
gimple call;
gcall *call;
tree val;
ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,


@ -878,10 +878,10 @@ add_to_evolution (unsigned loop_nb, tree chrec_before, enum tree_code code,
guards the exit edge. If the expression is too difficult to
analyze, then give up. */
gimple
gcond *
get_loop_exit_condition (const struct loop *loop)
{
gimple res = NULL;
gcond *res = NULL;
edge exit_edge = single_exit (loop);
if (dump_file && (dump_flags & TDF_SCEV))
@ -892,8 +892,8 @@ get_loop_exit_condition (const struct loop *loop)
gimple stmt;
stmt = last_stmt (exit_edge->src);
if (gimple_code (stmt) == GIMPLE_COND)
res = stmt;
if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
res = cond_stmt;
}
if (dump_file && (dump_flags & TDF_SCEV))
@ -915,7 +915,8 @@ typedef enum t_bool {
} t_bool;
static t_bool follow_ssa_edge (struct loop *loop, gimple, gimple, tree *, int);
static t_bool follow_ssa_edge (struct loop *loop, gimple, gphi *,
tree *, int);
/* Follow the ssa edge into the binary expression RHS0 CODE RHS1.
Return true if the strongly connected component has been found. */
@ -923,7 +924,8 @@ static t_bool follow_ssa_edge (struct loop *loop, gimple, gimple, tree *, int);
static t_bool
follow_ssa_edge_binary (struct loop *loop, gimple at_stmt,
tree type, tree rhs0, enum tree_code code, tree rhs1,
gimple halting_phi, tree *evolution_of_loop, int limit)
gphi *halting_phi, tree *evolution_of_loop,
int limit)
{
t_bool res = t_false;
tree evol;
@ -1058,7 +1060,8 @@ follow_ssa_edge_binary (struct loop *loop, gimple at_stmt,
static t_bool
follow_ssa_edge_expr (struct loop *loop, gimple at_stmt, tree expr,
gimple halting_phi, tree *evolution_of_loop, int limit)
gphi *halting_phi, tree *evolution_of_loop,
int limit)
{
enum tree_code code = TREE_CODE (expr);
tree type = TREE_TYPE (expr), rhs0, rhs1;
@ -1148,7 +1151,8 @@ follow_ssa_edge_expr (struct loop *loop, gimple at_stmt, tree expr,
static t_bool
follow_ssa_edge_in_rhs (struct loop *loop, gimple stmt,
gimple halting_phi, tree *evolution_of_loop, int limit)
gphi *halting_phi, tree *evolution_of_loop,
int limit)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
tree type = gimple_expr_type (stmt), rhs1, rhs2;
@ -1188,7 +1192,7 @@ follow_ssa_edge_in_rhs (struct loop *loop, gimple stmt,
/* Checks whether the I-th argument of a PHI comes from a backedge. */
static bool
backedge_phi_arg_p (gimple phi, int i)
backedge_phi_arg_p (gphi *phi, int i)
{
const_edge e = gimple_phi_arg_edge (phi, i);
@ -1208,8 +1212,8 @@ backedge_phi_arg_p (gimple phi, int i)
static inline t_bool
follow_ssa_edge_in_condition_phi_branch (int i,
struct loop *loop,
gimple condition_phi,
gimple halting_phi,
gphi *condition_phi,
gphi *halting_phi,
tree *evolution_of_branch,
tree init_cond, int limit)
{
@ -1243,8 +1247,8 @@ follow_ssa_edge_in_condition_phi_branch (int i,
static t_bool
follow_ssa_edge_in_condition_phi (struct loop *loop,
gimple condition_phi,
gimple halting_phi,
gphi *condition_phi,
gphi *halting_phi,
tree *evolution_of_loop, int limit)
{
int i, n;
@ -1290,8 +1294,8 @@ follow_ssa_edge_in_condition_phi (struct loop *loop,
static t_bool
follow_ssa_edge_inner_loop_phi (struct loop *outer_loop,
gimple loop_phi_node,
gimple halting_phi,
gphi *loop_phi_node,
gphi *halting_phi,
tree *evolution_of_loop, int limit)
{
struct loop *loop = loop_containing_stmt (loop_phi_node);
@ -1336,7 +1340,7 @@ follow_ssa_edge_inner_loop_phi (struct loop *outer_loop,
path that is analyzed on the return walk. */
static t_bool
follow_ssa_edge (struct loop *loop, gimple def, gimple halting_phi,
follow_ssa_edge (struct loop *loop, gimple def, gphi *halting_phi,
tree *evolution_of_loop, int limit)
{
struct loop *def_loop;
@ -1359,7 +1363,8 @@ follow_ssa_edge (struct loop *loop, gimple def, gimple halting_phi,
information and set the approximation to the main
variable. */
return follow_ssa_edge_in_condition_phi
(loop, def, halting_phi, evolution_of_loop, limit);
(loop, as_a <gphi *> (def), halting_phi, evolution_of_loop,
limit);
/* When the analyzed phi is the halting_phi, the
depth-first search is over: we have found a path from
@ -1376,7 +1381,8 @@ follow_ssa_edge (struct loop *loop, gimple def, gimple halting_phi,
/* Inner loop. */
if (flow_loop_nested_p (loop, def_loop))
return follow_ssa_edge_inner_loop_phi
(loop, def, halting_phi, evolution_of_loop, limit + 1);
(loop, as_a <gphi *> (def), halting_phi, evolution_of_loop,
limit + 1);
/* Outer loop. */
return t_false;
@ -1457,7 +1463,7 @@ simplify_peeled_chrec (struct loop *loop, tree arg, tree init_cond)
function from LOOP_PHI_NODE to LOOP_PHI_NODE in the loop. */
static tree
analyze_evolution_in_loop (gimple loop_phi_node,
analyze_evolution_in_loop (gphi *loop_phi_node,
tree init_cond)
{
int i, n = gimple_phi_num_args (loop_phi_node);
@ -1552,7 +1558,7 @@ analyze_evolution_in_loop (gimple loop_phi_node,
loop, and leaves this task to the on-demand tree reconstructor. */
static tree
analyze_initial_condition (gimple loop_phi_node)
analyze_initial_condition (gphi *loop_phi_node)
{
int i, n;
tree init_cond = chrec_not_analyzed_yet;
@ -1601,13 +1607,15 @@ analyze_initial_condition (gimple loop_phi_node)
if (TREE_CODE (init_cond) == SSA_NAME)
{
gimple def = SSA_NAME_DEF_STMT (init_cond);
tree res;
if (gimple_code (def) == GIMPLE_PHI
&& (res = degenerate_phi_result (def)) != NULL_TREE
/* Only allow invariants here, otherwise we may break
loop-closed SSA form. */
&& is_gimple_min_invariant (res))
init_cond = res;
if (gphi *phi = dyn_cast <gphi *> (def))
{
tree res = degenerate_phi_result (phi);
if (res != NULL_TREE
/* Only allow invariants here, otherwise we may break
loop-closed SSA form. */
&& is_gimple_min_invariant (res))
init_cond = res;
}
}
if (dump_file && (dump_flags & TDF_SCEV))
@ -1623,7 +1631,7 @@ analyze_initial_condition (gimple loop_phi_node)
/* Analyze the scalar evolution for LOOP_PHI_NODE. */
static tree
interpret_loop_phi (struct loop *loop, gimple loop_phi_node)
interpret_loop_phi (struct loop *loop, gphi *loop_phi_node)
{
tree res;
struct loop *phi_loop = loop_containing_stmt (loop_phi_node);
@ -1672,7 +1680,7 @@ interpret_loop_phi (struct loop *loop, gimple loop_phi_node)
analyzed. */
static tree
interpret_condition_phi (struct loop *loop, gimple condition_phi)
interpret_condition_phi (struct loop *loop, gphi *condition_phi)
{
int i, n = gimple_phi_num_args (condition_phi);
tree res = chrec_not_analyzed_yet;
@ -2000,9 +2008,9 @@ analyze_scalar_evolution_1 (struct loop *loop, tree var, tree res)
case GIMPLE_PHI:
if (loop_phi_node_p (def))
res = interpret_loop_phi (loop, def);
res = interpret_loop_phi (loop, as_a <gphi *> (def));
else
res = interpret_condition_phi (loop, def);
res = interpret_condition_phi (loop, as_a <gphi *> (def));
break;
default:
@ -2249,8 +2257,8 @@ loop_closed_phi_def (tree var)
{
struct loop *loop;
edge exit;
gimple phi;
gimple_stmt_iterator psi;
gphi *phi;
gphi_iterator psi;
if (var == NULL_TREE
|| TREE_CODE (var) != SSA_NAME)
@ -2263,7 +2271,7 @@ loop_closed_phi_def (tree var)
for (psi = gsi_start_phis (exit->dest); !gsi_end_p (psi); gsi_next (&psi))
{
phi = gsi_stmt (psi);
phi = psi.phi ();
if (PHI_ARG_DEF_FROM_EDGE (phi, exit) == var)
return PHI_RESULT (phi);
}
@ -3349,11 +3357,12 @@ scev_const_prop (void)
{
basic_block bb;
tree name, type, ev;
gimple phi, ass;
gphi *phi;
gassign *ass;
struct loop *loop, *ex_loop;
bitmap ssa_names_to_remove = NULL;
unsigned i;
gimple_stmt_iterator psi;
gphi_iterator psi;
if (number_of_loops (cfun) <= 1)
return 0;
@ -3364,7 +3373,7 @@ scev_const_prop (void)
for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
{
phi = gsi_stmt (psi);
phi = psi.phi ();
name = PHI_RESULT (phi);
if (virtual_operand_p (name))
@ -3402,7 +3411,7 @@ scev_const_prop (void)
{
gimple_stmt_iterator psi;
name = ssa_name (i);
phi = SSA_NAME_DEF_STMT (name);
phi = as_a <gphi *> (SSA_NAME_DEF_STMT (name));
gcc_assert (gimple_code (phi) == GIMPLE_PHI);
psi = gsi_for_stmt (phi);
@ -3440,7 +3449,7 @@ scev_const_prop (void)
for (psi = gsi_start_phis (exit->dest); !gsi_end_p (psi); )
{
phi = gsi_stmt (psi);
phi = psi.phi ();
rslt = PHI_RESULT (phi);
def = PHI_ARG_DEF_FROM_EDGE (phi, exit);
if (virtual_operand_p (def))


@ -22,7 +22,7 @@ along with GCC; see the file COPYING3. If not see
#define GCC_TREE_SCALAR_EVOLUTION_H
extern tree number_of_latch_executions (struct loop *);
extern gimple get_loop_exit_condition (const struct loop *);
extern gcond *get_loop_exit_condition (const struct loop *);
extern void scev_initialize (void);
extern bool scev_initialized_p (void);


@ -1293,7 +1293,7 @@ scan_function (void)
switch (gimple_code (stmt))
{
case GIMPLE_RETURN:
t = gimple_return_retval (stmt);
t = gimple_return_retval (as_a <greturn *> (stmt));
if (t != NULL_TREE)
ret |= build_access_from_expr (t, stmt, false);
if (final_bbs)
@ -1338,21 +1338,24 @@ scan_function (void)
break;
case GIMPLE_ASM:
walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
asm_visit_addr);
if (final_bbs)
bitmap_set_bit (final_bbs, bb->index);
{
gasm *asm_stmt = as_a <gasm *> (stmt);
walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
asm_visit_addr);
if (final_bbs)
bitmap_set_bit (final_bbs, bb->index);
for (i = 0; i < gimple_asm_ninputs (stmt); i++)
{
t = TREE_VALUE (gimple_asm_input_op (stmt, i));
ret |= build_access_from_expr (t, stmt, false);
}
for (i = 0; i < gimple_asm_noutputs (stmt); i++)
{
t = TREE_VALUE (gimple_asm_output_op (stmt, i));
ret |= build_access_from_expr (t, stmt, true);
}
for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
{
t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
ret |= build_access_from_expr (t, asm_stmt, false);
}
for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
{
t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
ret |= build_access_from_expr (t, asm_stmt, true);
}
}
break;
default:
@ -1541,7 +1544,7 @@ build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
offset such as array[var_index]. */
if (!base)
{
gimple stmt;
gassign *stmt;
tree tmp, addr;
gcc_checking_assert (gsi);
@ -2625,7 +2628,7 @@ generate_subtree_copies (struct access *access, tree agg,
|| access->offset + access->size > start_offset))
{
tree expr, repl = get_access_replacement (access);
gimple stmt;
gassign *stmt;
expr = build_ref_for_model (loc, agg, access->offset - top_offset,
access, gsi, insert_after);
@ -2663,7 +2666,7 @@ generate_subtree_copies (struct access *access, tree agg,
&& (chunk_size == 0
|| access->offset + access->size > start_offset))
{
gimple ds;
gdebug *ds;
tree drhs = build_debug_ref_for_model (loc, agg,
access->offset - top_offset,
access);
@ -2699,7 +2702,7 @@ init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
if (access->grp_to_be_replaced)
{
gimple stmt;
gassign *stmt;
stmt = gimple_build_assign (get_access_replacement (access),
build_zero_cst (access->type));
@ -2712,9 +2715,10 @@ init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
}
else if (access->grp_to_be_debug_replaced)
{
gimple ds = gimple_build_debug_bind (get_access_replacement (access),
build_zero_cst (access->type),
gsi_stmt (*gsi));
gdebug *ds
= gimple_build_debug_bind (get_access_replacement (access),
build_zero_cst (access->type),
gsi_stmt (*gsi));
if (insert_after)
gsi_insert_after (gsi, ds, GSI_NEW_STMT);
else
@ -2808,7 +2812,7 @@ sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
if (write)
{
gimple stmt;
gassign *stmt;
if (access->grp_partial_lhs)
ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
@ -2819,7 +2823,7 @@ sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
}
else
{
gimple stmt;
gassign *stmt;
if (access->grp_partial_lhs)
repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
@ -2835,9 +2839,9 @@ sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
}
else if (write && access->grp_to_be_debug_replaced)
{
gimple ds = gimple_build_debug_bind (get_access_replacement (access),
NULL_TREE,
gsi_stmt (*gsi));
gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
NULL_TREE,
gsi_stmt (*gsi));
gsi_insert_after (gsi, ds, GSI_NEW_STMT);
}
@ -2937,7 +2941,7 @@ load_assign_lhs_subreplacements (struct access *lacc,
if (lacc->grp_to_be_replaced)
{
struct access *racc;
gimple stmt;
gassign *stmt;
tree rhs;
racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
@ -2987,7 +2991,7 @@ load_assign_lhs_subreplacements (struct access *lacc,
if (lacc && lacc->grp_to_be_debug_replaced)
{
gimple ds;
gdebug *ds;
tree drhs;
struct access *racc = find_access_in_subtree (sad->top_racc,
offset,
@ -3229,7 +3233,7 @@ sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
TREE_TYPE (dlhs), drhs);
}
gimple ds = gimple_build_debug_bind (dlhs, drhs, stmt);
gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
gsi_insert_before (gsi, ds, GSI_SAME_STMT);
}
@ -3399,7 +3403,7 @@ sra_modify_function_body (void)
switch (gimple_code (stmt))
{
case GIMPLE_RETURN:
t = gimple_return_retval_ptr (stmt);
t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
if (*t != NULL_TREE)
modified |= sra_modify_expr (t, &gsi, false);
break;
@ -3426,16 +3430,19 @@ sra_modify_function_body (void)
break;
case GIMPLE_ASM:
for (i = 0; i < gimple_asm_ninputs (stmt); i++)
{
t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
modified |= sra_modify_expr (t, &gsi, false);
}
for (i = 0; i < gimple_asm_noutputs (stmt); i++)
{
t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
modified |= sra_modify_expr (t, &gsi, true);
}
{
gasm *asm_stmt = as_a <gasm *> (stmt);
for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
{
t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
modified |= sra_modify_expr (t, &gsi, false);
}
for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
{
t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
modified |= sra_modify_expr (t, &gsi, true);
}
}
break;
default:
@ -4563,7 +4570,7 @@ replace_removed_params_ssa_names (gimple stmt,
else if (is_gimple_call (stmt))
gimple_call_set_lhs (stmt, name);
else
gimple_phi_set_result (stmt, name);
gimple_phi_set_result (as_a <gphi *> (stmt), name);
replace_uses_by (lhs, name);
release_ssa_name (lhs);
@ -4659,7 +4666,7 @@ ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
switch (gimple_code (stmt))
{
case GIMPLE_RETURN:
t = gimple_return_retval_ptr (stmt);
t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
if (*t != NULL_TREE)
modified |= ipa_modify_expr (t, true, adjustments);
break;
@ -4687,16 +4694,19 @@ ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
break;
case GIMPLE_ASM:
for (i = 0; i < gimple_asm_ninputs (stmt); i++)
{
t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
modified |= ipa_modify_expr (t, true, adjustments);
}
for (i = 0; i < gimple_asm_noutputs (stmt); i++)
{
t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
modified |= ipa_modify_expr (t, false, adjustments);
}
{
gasm *asm_stmt = as_a <gasm *> (stmt);
for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
{
t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
modified |= ipa_modify_expr (t, true, adjustments);
}
for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
{
t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
modified |= ipa_modify_expr (t, false, adjustments);
}
}
break;
default:
@ -4736,7 +4746,8 @@ sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
struct ipa_parm_adjustment *adj;
imm_use_iterator ui;
gimple stmt, def_temp;
gimple stmt;
gdebug *def_temp;
tree name, vexpr, copy = NULL_TREE;
use_operand_p use_p;
@ -4887,9 +4898,10 @@ convert_callers (struct cgraph_node *node, tree old_decl,
for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
gcall *stmt;
tree call_fndecl;
if (gimple_code (stmt) != GIMPLE_CALL)
stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
if (!stmt)
continue;
call_fndecl = gimple_call_fndecl (stmt);
if (call_fndecl == old_decl)


@ -1490,7 +1490,7 @@ refs_output_dependent_p (tree store1, tree store2)
otherwise return false. */
static bool
ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
{
tree base, callee;
unsigned i;
@ -1803,7 +1803,7 @@ process_args:
}
static bool
ref_maybe_used_by_call_p (gimple call, ao_ref *ref)
ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
{
bool res;
res = ref_maybe_used_by_call_p_1 (call, ref);
@ -1838,10 +1838,10 @@ ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
return refs_may_alias_p (rhs, ref);
}
else if (is_gimple_call (stmt))
return ref_maybe_used_by_call_p (stmt, ref);
else if (gimple_code (stmt) == GIMPLE_RETURN)
return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
tree retval = gimple_return_retval (stmt);
tree retval = gimple_return_retval (return_stmt);
if (retval
&& TREE_CODE (retval) != SSA_NAME
&& !is_gimple_min_invariant (retval)
@ -1874,7 +1874,7 @@ ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
return true, otherwise return false. */
bool
call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
{
tree base;
tree callee;
@ -2147,7 +2147,7 @@ call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
return true, otherwise return false. */
bool
call_may_clobber_ref_p (gimple call, tree ref)
call_may_clobber_ref_p (gcall *call, tree ref)
{
bool res;
ao_ref r;
@ -2179,7 +2179,7 @@ stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
return true;
}
return call_may_clobber_ref_p_1 (stmt, ref);
return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
}
else if (gimple_assign_single_p (stmt))
{


@ -112,8 +112,8 @@ extern bool ref_maybe_used_by_stmt_p (gimple, ao_ref *);
extern bool stmt_may_clobber_global_p (gimple);
extern bool stmt_may_clobber_ref_p (gimple, tree);
extern bool stmt_may_clobber_ref_p_1 (gimple, ao_ref *);
extern bool call_may_clobber_ref_p (gimple, tree);
extern bool call_may_clobber_ref_p_1 (gimple, ao_ref *);
extern bool call_may_clobber_ref_p (gcall *, tree);
extern bool call_may_clobber_ref_p_1 (gcall *, ao_ref *);
extern bool stmt_kills_ref_p (gimple, tree);
extern bool stmt_kills_ref_p (gimple, ao_ref *);
extern tree get_continuation_for_phi (gimple, ao_ref *,

View File

@ -839,11 +839,11 @@ ccp_initialize (void)
except for phi nodes for virtual operands when we do not do store ccp. */
FOR_EACH_BB_FN (bb, cfun)
{
gimple_stmt_iterator i;
gphi_iterator i;
for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
{
gimple phi = gsi_stmt (i);
gphi *phi = i.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
prop_set_simulate_again (phi, false);
@ -1028,7 +1028,7 @@ ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
of the PHI node that are incoming via executable edges. */
static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
ccp_visit_phi_node (gphi *phi)
{
unsigned i;
ccp_prop_value_t *old_val, new_val;
@ -1174,7 +1174,7 @@ ccp_fold (gimple stmt)
case GIMPLE_SWITCH:
{
/* Return the constant switch index. */
return valueize_op (gimple_switch_index (stmt));
return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
}
case GIMPLE_ASSIGN:
@ -1724,7 +1724,7 @@ evaluate_stmt (gimple stmt)
simplified = gimple_assign_rhs1 (stmt);
}
else if (code == GIMPLE_SWITCH)
simplified = gimple_switch_index (stmt);
simplified = gimple_switch_index (as_a <gswitch *> (stmt));
else
/* These cannot satisfy is_gimple_min_invariant without folding. */
gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
@ -1935,7 +1935,8 @@ static void
insert_clobber_before_stack_restore (tree saved_val, tree var,
gimple_htab **visited)
{
gimple stmt, clobber_stmt;
gimple stmt;
gassign *clobber_stmt;
tree clobber;
imm_use_iterator iter;
gimple_stmt_iterator i;
@ -2094,6 +2095,7 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
case GIMPLE_COND:
{
gcond *cond_stmt = as_a <gcond *> (stmt);
ccp_prop_value_t val;
/* Statement evaluation will handle type mismatches in constants
more gracefully than the final propagation. This allows us to
@ -2113,9 +2115,9 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi)
}
if (integer_zerop (val.value))
gimple_cond_make_false (stmt);
gimple_cond_make_false (cond_stmt);
else
gimple_cond_make_true (stmt);
gimple_cond_make_true (cond_stmt);
return true;
}
@ -2600,10 +2602,10 @@ optimize_unreachable (gimple_stmt_iterator i)
if (is_gimple_debug (stmt))
continue;
if (gimple_code (stmt) == GIMPLE_LABEL)
if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
/* Verify we do not need to preserve the label. */
if (FORCED_LABEL (gimple_label_label (stmt)))
if (FORCED_LABEL (gimple_label_label (label_stmt)))
return false;
continue;
@@ -2624,15 +2626,15 @@ optimize_unreachable (gimple_stmt_iterator i)
         continue;
       stmt = gsi_stmt (gsi);
-      if (gimple_code (stmt) == GIMPLE_COND)
+      if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
         {
           if (e->flags & EDGE_TRUE_VALUE)
-            gimple_cond_make_false (stmt);
+            gimple_cond_make_false (cond_stmt);
           else if (e->flags & EDGE_FALSE_VALUE)
-            gimple_cond_make_true (stmt);
+            gimple_cond_make_true (cond_stmt);
           else
             gcc_unreachable ();
-          update_stmt (stmt);
+          update_stmt (cond_stmt);
         }
       else
         {
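
The two optimize_unreachable hunks above replace explicit gimple_code checks with dyn_cast, which folds the kind test and the conversion into one step: it returns NULL when the statement is not of the requested class, so it can sit directly in the if-condition and bind a correctly typed local for the body. A brief sketch of the idiom; handle_label is a made-up placeholder, while glabel, gcond and the accessors are the real names used in these hunks:

    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      handle_label (label_stmt);             /* placeholder per-label action */
    else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
      gimple_cond_make_true (cond_stmt);     /* accessor from the hunks above */
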


@@ -834,12 +834,11 @@ build_ssa_conflict_graph (tree_live_info_p liveinfo)
   FOR_EACH_BB_FN (bb, cfun)
     {
-      gimple_stmt_iterator gsi;
       /* Start with live on exit temporaries.  */
       live_track_init (live, live_on_exit (liveinfo, bb));
-      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
+      for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
+           gsi_prev (&gsi))
         {
           tree var;
           gimple stmt = gsi_stmt (gsi);
@@ -876,9 +875,10 @@ build_ssa_conflict_graph (tree_live_info_p liveinfo)
          There must be a conflict recorded between the result of the PHI and
          any variables that are live.  Otherwise the out-of-ssa translation
          may create incorrect code.  */
-      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+           gsi_next (&gsi))
         {
-          gimple phi = gsi_stmt (gsi);
+          gphi *phi = gsi.phi ();
           tree result = PHI_RESULT (phi);
           if (live_track_live_p (live, result))
             live_track_process_def (live, result, graph);
@@ -944,9 +944,11 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
     {
       tree arg;
-      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+      for (gphi_iterator gpi = gsi_start_phis (bb);
+           !gsi_end_p (gpi);
+           gsi_next (&gpi))
         {
-          gimple phi = gsi_stmt (gsi);
+          gphi *phi = gpi.phi ();
           size_t i;
           int ver;
           tree res;
@@ -1018,15 +1020,16 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
         case GIMPLE_ASM:
           {
+            gasm *asm_stmt = as_a <gasm *> (stmt);
             unsigned long noutputs, i;
             unsigned long ninputs;
             tree *outputs, link;
-            noutputs = gimple_asm_noutputs (stmt);
-            ninputs = gimple_asm_ninputs (stmt);
+            noutputs = gimple_asm_noutputs (asm_stmt);
+            ninputs = gimple_asm_ninputs (asm_stmt);
             outputs = (tree *) alloca (noutputs * sizeof (tree));
             for (i = 0; i < noutputs; ++i)
               {
-                link = gimple_asm_output_op (stmt, i);
+                link = gimple_asm_output_op (asm_stmt, i);
                 outputs[i] = TREE_VALUE (link);
               }
@@ -1037,7 +1040,7 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
                 char *end;
                 unsigned long match;
-                link = gimple_asm_input_op (stmt, i);
+                link = gimple_asm_input_op (asm_stmt, i);
                 constraint
                   = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
                 input = TREE_VALUE (link);
@@ -1199,11 +1202,11 @@ coalesce_partitions (var_map map, ssa_conflicts_p graph, coalesce_list_p cl,
       FOR_EACH_EDGE (e, ei, bb->preds)
         if (e->flags & EDGE_ABNORMAL)
           {
-            gimple_stmt_iterator gsi;
+            gphi_iterator gsi;
             for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
                  gsi_next (&gsi))
               {
-                gimple phi = gsi_stmt (gsi);
+                gphi *phi = gsi.phi ();
                 tree res = PHI_RESULT (phi);
                 tree arg = PHI_ARG_DEF (phi, e->dest_idx);
                 int v1 = SSA_NAME_VERSION (res);


@@ -340,7 +340,7 @@ copy_prop_visit_stmt (gimple stmt, edge *taken_edge_p, tree *result_p)
    set it to be the value of the LHS of PHI.  */
 static enum ssa_prop_result
-copy_prop_visit_phi_node (gimple phi)
+copy_prop_visit_phi_node (gphi *phi)
 {
   enum ssa_prop_result retval;
   unsigned i;
@@ -461,9 +461,8 @@ init_copy_prop (void)
   FOR_EACH_BB_FN (bb, cfun)
     {
-      gimple_stmt_iterator si;
-      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
+      for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
+           gsi_next (&si))
         {
           gimple stmt = gsi_stmt (si);
           ssa_op_iter iter;
@@ -487,9 +486,10 @@ init_copy_prop (void)
             set_copy_of_val (def, def);
         }
-      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
+           gsi_next (&si))
         {
-          gimple phi = gsi_stmt (si);
+          gphi *phi = si.phi ();
           tree def;
           def = gimple_phi_result (phi);


@@ -347,9 +347,8 @@ pass_rename_ssa_copies::execute (function *fun)
 {
   var_map map;
   basic_block bb;
-  gimple_stmt_iterator gsi;
   tree var, part_var;
-  gimple stmt, phi;
+  gimple stmt;
   unsigned x;
   FILE *debug;
@@ -365,7 +364,8 @@ pass_rename_ssa_copies::execute (function *fun)
   FOR_EACH_BB_FN (bb, fun)
     {
       /* Scan for real copies.  */
-      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+           gsi_next (&gsi))
         {
           stmt = gsi_stmt (gsi);
           if (gimple_assign_ssa_name_copy_p (stmt))
@@ -381,12 +381,12 @@ pass_rename_ssa_copies::execute (function *fun)
   FOR_EACH_BB_FN (bb, fun)
     {
       /* Treat PHI nodes as copies between the result and each argument.  */
-      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+           gsi_next (&gsi))
         {
          size_t i;
          tree res;
-          phi = gsi_stmt (gsi);
+          gphi *phi = gsi.phi ();
           res = gimple_phi_result (phi);
           /* Do not process virtual SSA_NAMES.  */
