[multiple changes]

2006-12-11  Diego Novillo  <dnovillo@redhat.com>

	* doc/tree-ssa.texi: Update documentation for virtual operands
	and the use of push_stmt_changes/pop_stmt_changes.
	* doc/invoke.texi: Remove documentation for params
	global-var-threshold.
	Update documentation on max-aliased-vops.

	* tree-into-ssa.c: Clean up comments, variables and
	spacing in various functions.
	(regs_to_rename): Declare.
	(mem_syms_to_rename): Declare.
	(dump_update_ssa): Declare.
	(debug_update_ssa): Declare.
	(dump_names_replaced_by): Declare.
	(debug_names_replaced_by): Declare.
	(dump_def_blocks): Declare.
	(debug_def_blocks): Declare.
	(dump_defs_stack): Declare.
	(debug_defs_stack): Declare.
	(dump_currdefs): Declare.
	(debug_currdefs): Declare.
	(mark_def_sites): Do not handle virtual operands.
	(compute_idf): Rename from find_idf.  Update users.
	(register_new_def): Make local.  Convert second argument
	to 'tree'.
	Use BLOCK_DEFS_STACK directly.
	If pushing a non-register, also push the underlying
	symbol.
	(rewrite_stmt): Do not handle virtual operands.
	(dump_tree_ssa): Call dump_def_blocks, dump_defs_stack,
	dump_currdefs and dump_tree_ssa_stats.
	(dump_tree_ssa_stats): Also dump REPL_TBL.
	(replace_use): Remove.  Update all users to call SET_USE
	instead.
	(rewrite_blocks): Move code to free memory to
	fini_ssa_renamer.
	(mark_def_site_blocks): Move initialization code to
	init_ssa_renamer.
	(init_ssa_renamer): New.
	(fini_ssa_renamer): New.
	(rewrite_into_ssa): Call them.
	(prepare_block_for_update): Process SSA_OP_ALL_USES first
	and SSA_OP_ALL_DEFS later.  Do not process virtual
	operands separately.
	(dump_update_ssa): Call dump_decl_set.
	(init_update_ssa): Initialize regs_to_rename and
	mem_syms_to_rename.
	Call init_ssa_renamer.
	(delete_update_ssa): Call fini_ssa_renamer.
	Free blocks_with_phis_to_rewrite.
	(mark_sym_for_renaming): If the variable has
	sub-variables, also mark them.
	If the variable belongs to a partition, also mark it.
	(mark_set_for_renaming): Call mark_sym_for_renaming on
	every symbol in the set.
	(switch_virtuals_to_full_rewrite): Call
	mark_set_for_renaming.
	(update_ssa): Separate syms_to_rename into regs_to_rename
	and mem_syms_to_rename.

	* tree-dump.c (dump_options): Add TDF_MEMSYMS.
	* tree-pretty-print.c (debug_generic_expr): Add TDF_MEMSYMS.
	(debug_generic_stmt): Likewise.
	(debug_tree_chain): Likewise.
	(dump_symbols): New.
	(dump_generic_node): Check for TDF_MEMSYMS.
	Handle MEMORY_PARTITION_TAG.
	If the statement references memory and TDF_MEMSYMS is
	given, call dump_symbols.
	Indicate default names with (D).
	(dump_vops): Update for new virtual operator format.

	* tree.c (init_ttree): Add MEMORY_PARTITION_TAG to
	tree_contains_struct.
	(tree_code_size): Handle MEMORY_PARTITION_TAG.
	(tree_node_structure): Likewise.
	(needs_to_live_in_memory): Handle SSA names.
	* tree.h (MTAG_P): Likewise.
	(struct tree_memory_partition_tag): Declare.
	(MPT_SYMBOLS): Define.
	(union tree_node): Add field 'mpt'.
	* treestruct.def (TS_MEMORY_PARTITION_TAG): Define.
	* tree.def (MEMORY_PARTITION_TAG): Define.

	* tree-pass.h (TDF_MEMSYMS): Define.

	* params.h (GLOBAL_VAR_THRESHOLD): Remove.

	* tree-ssa-alias.c: Include pointer-set.h
	(struct alias_map_d): Remove fields total_alias_vops,
	grouped_p and may_aliases.  Update all users.
	(struct mp_info_def): Declare.
	(mp_info_t): New type.
	(get_smt_for): Rename from get_tmt_for.  Update all
	users.
	(add_may_alias): Add argument ALREADY_ADDED.  If given,
	use it to avoid adding duplicate entries to alias sets.
	(replace_may_alias): Remove.  Update all users.
	(total_alias_vops_cmp): Remove.  Update all users.
	(group_aliases_into): Remove.  Update all users.
	(tree_pointer_compare): Remove.  Update all users.
	(compact_name_tags): Remove.  Update all users.
	(group_aliases): Remove.  Update all users.
	(mark_non_addressable): Move from tree-flow-inline.h.
	Remove the symbol from the partition holding it, if
	needed.
	(dump_mp_info): New.
	(debug_mp_info): New.
	(sort_mp_info): New.
	(create_partition_for): New.
	(rewrite_alias_set_for): New.
	(compute_memory_partitions): New.
	(compute_may_aliases): Call it.
	(init_alias_info): If computing aliases for the first
	time, mark every memory symbol for renaming.
	(have_common_aliases_p): New.
	(compute_flow_insensitive_aliasing): Call it.
	(setup_pointers_and_addressables): Do not cache
	num_referenced_vars.
	For register promoted symbols, mark their former
	partition for renaming.
	(maybe_create_global_var): Only create .GLOBAL_VAR if
	there are no call-clobbered variables and a mix of pure
	and non-pure functions were found.
	(may_alias_p): Tidy comments.
	(create_tag_raw): Remove unused variable new_type.
	(dump_alias_info): Call dump_memory_partitions.
	(dump_points_to_info_for): Call dump_decl_set.
	(may_be_aliased): Tidy comments and formatting.

	* timevar.def (TV_MEMORY_PARTITIONING): Define.
	* tree-vectorizer.c (vect_memsyms_to_rename): Rename from
	vect_vnames_to_rename.  Set DECL_UIDs instead of SSA name
	versions in it.
	(slpeel_update_phi_nodes_for_guard1): Ignore memory PHIs.
	* tree-vect-transform.c (vect_transform_loop): Call
	mark_set_for_renaming with vect_memsyms_to_rename.
	* tree-flow-inline.h (zero_imm_uses_p): New.
	(memory_partition): New.
	(set_memory_partition): New.
	(factoring_name_p): New.
	(symbol_mem_tag): New.  Update every function that used
	to access the annotation directly.
	(set_symbol_mem_tag): Likewise.

	* tree-ssa-copy.c (may_propagate_copy): Allow copies
	between a partition and a symbol as long as the symbol
	belongs to the partition.
	(merge_alias_info): Ignore merge requests when memory
	partitions are involved.

	* tree-ssa.c (verify_ssa_name): Check that default
	definitions have empty defining statements.
	(verify_use): Remove argument IS_VIRTUAL.
	Don't call verify_ssa_name.
	(verify_phi_args): Call verify_ssa_name.
	(verify_flow_insensitive_alias_info): Handle MPTs.
	(verify_flow_sensitive_alias_info): Likewise.
	(verify_name_tags): Likewise.
	(verify_call_clobbering): Likewise.
	(verify_ssa): Check for VOPs only after aliasing
	information is available.
	Check virtuals and real operands separately.
	Call verify_ssa_name on every operand.
	(stmt_references_memory_p): Move to tree-ssa-operands.c.
	(walk_use_def_chains_1): Guard against NULL PHI
	arguments.

	* tree-ssa-operands.c (stmt_references_memory_p): Move from
	tree-ssa.c.
	(get_mpt_for): New.
	(dump_memory_partitions): New.
	(debug_memory_partitions): New.

	* tree-flow.h (struct var_ann_d): Add field mpt.
	(struct stmt_ann_d): Add bitfield references_memory.
	* Makefile.in (tree-ssa-structalias.o): Include
	pointer-set.h
	(tree-ssa-alias.o): Likewise.
	* tree-ssa-structalias.c (update_alias_info): Use
	STORED_SYMS to determine which variables are being
	written to by the store operation.
	* tree-ssa-structalias.h (struct alias_info)
	<total_alias_vops>: Remove.  Update all users.
	<written_vars>: Change to a pointer set.  Update all
	users.
	<dereferenced_ptrs_store>: Likewise.
	<dereferenced_ptrs_load>: Likewise.
	(NUM_REFERENCES): Remove.  Update all users.
	(NUM_REFERENCES_CLEAR): Remove.  Update all users.
	(NUM_REFERENCES_INC): Remove.  Update all users.
	(NUM_REFERENCES_SET): Remove.  Update all users.

	* params.def (PARAM_GLOBAL_VAR_THRESHOLD): Remove.
	Update all users.
	(PARAM_MAX_ALIASED_VOPS): Set to 10.
	* tree-ssanames.c (make_ssa_name): Initialize
	SSA_NAME_IS_DEFAULT_DEF to 0.
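
The memory-partition machinery introduced by this entry can be pictured
with a small sketch.  memory_partition, MPT_SYMBOLS and dump_decl_set are
names from this patch; show_partition_for itself is a hypothetical helper,
not part of the commit:

  /* Print the symbols held by the memory partition, if any, that the
     memory (non-register) symbol SYM has been assigned to.  */
  static void
  show_partition_for (tree sym)
  {
    tree mpt = memory_partition (sym);
    if (mpt)
      {
        fprintf (stderr, "Partition for ");
        print_generic_expr (stderr, sym, 0);
        fprintf (stderr, ": ");
        dump_decl_set (stderr, MPT_SYMBOLS (mpt));
      }
  }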

2006-12-11  Aldy Hernandez  <aldyh@redhat.com>

	* tree-ssa-dse.c (aggregate_vardecl_d): New.
	(dse_global_data): Add aggregate_vardecl field.
	(dse_possible_dead_store_p): New.
	Add prev_defvar variable.
	Allow immediate uses and previous immediate uses to differ
	if they are setting different parts of the whole.
	(get_aggregate_vardecl): New.
	(dse_record_partial_aggregate_store): New.
	(dse_whole_aggregate_clobbered_p): New.
	(dse_partial_kill_p): New.
	(dse_optimize_stmt): Abstract code checking a possible dead store
	into new function dse_possible_dead_store_p().
	Call dse_maybe_record_aggregate_store().
	When checking whether a STMT and its USE_STMT refer to the
	same memory address, check also for partial kills that clobber
	the whole.
	Move some variable definitions to the block where they are used.
	(aggregate_vardecl_hash): New.
	(aggregate_vardecl_eq): New.
	(aggregate_vardecl_free): New.
	(aggregate_whole_store_p): New.
	(tree_ssa_dse): Initialize and free aggregate_vardecl.
	Mark which aggregate stores we care about.
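
The store pattern targeted by the new partial-kill logic is the one in the
testcase added further down in this commit; as a self-contained sketch
(a void return type is assumed here, the testcase uses the old implicit-int
style):

  struct { int a, b; } soup1, soup2;

  void
  foo (void)
  {
    soup1 = soup2;   /* Whole-aggregate store: now recognized as dead...  */
    soup1.a = 66;    /* ...because these two partial stores together...  */
    soup1.b = 77;    /* ...clobber every field of soup1.  */
  }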

2006-12-11  Andrew Macleod  <amacleod@redhat.com>

	* tree-ssa-operands.h (struct vuse_element_d): Declare.
	(vuse_element_t): Declare.
	(struct vuse_vec_d): Declare.
	(vuse_vec_p): Declare.
	(VUSE_VECT_NUM_ELEM): Define.
	(VUSE_VECT_ELEMENT_NC): Define.
	(VUSE_ELEMENT_PTR_NC): Define.
	(VUSE_ELEMENT_VAR_NC): Define.
	(VUSE_VECT_ELEMENT): Define.
	(VUSE_ELEMENT_PTR): Define.
	(VUSE_ELEMENT_VAR): Define.
	(struct maydef_optype_d) <use_var>: Remove.
	<use_ptr>: Remove.
	<usev>: Add.
	(struct vuse_optype_d) <kill_var>: Remove.
	<use_ptr>: Remove.
	<usev>: Add.
	(struct mustdef_optype_d) <kill_var>: Remove.
	<use_ptr>: Remove.
	<usev>: Add.
	(VUSE_OP_PTR): Add argument.  Use VUSE_ELEMENT_PTR.
	(VUSE_OP): Add argument.  Use VUSE_ELEMENT_PTR.
	(VUSE_NUM): Define.
	(VUSE_VECT): Define.
	(MAYDEF_OP_PTR): Add argument.  Use VUSE_OP_PTR.
	(MAYDEF_OP): Add argument.  Use VUSE_OP.
	(MAYDEF_NUM): Define.
	(MAYDEF_VECT): Define.
	(MUSTDEF_KILL_PTR): Use VUSE_OP_PTR.
	(MUSTDEF_KILL): Use VUSE_OP.
	(MUSTDEF_NUM): Define.
	(MUSTDEF_VECT): Define.
	(realloc_maydef): Declare.
	(realloc_vuse): Declare.
	(struct ssa_operand_iterator_d) <vuse_index>: Add.
	<mayuse_index>: Add.
	(LOADED_SYMS): Define.
	(STORED_SYMS): Define.
	(FOR_EACH_SSA_MUSTDEF_OPERAND): Call op_iter_next_mustdef.
	* tree-into-ssa.c: Adapt for multi-operand V_MAY_DEF and VUSE
	operators.
	* tree-pretty-print.c: Likewise.
	* tree-ssa-dse.c: Likewise.
	* tree-flow-inline.h: Likewise.
	(op_iter_next_mustdef): New.
	* tree-ssa-operands.c: Likewise.
	(ALLOC_OPTYPE): Remove.
	Update all users.
	(alloc_def): New.
	(alloc_use): New.
	(alloc_maydef): New.
	(alloc_vuse): New.
	(alloc_mustdef): New.
	(realloc_maydef): New.
	(realloc_vuse): New.
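
A rough illustration of the new multi-element VUSE accessors (VUSE_OPS,
VUSE_NUM and the two-argument VUSE_OP come from this patch; print_vuses is
a hypothetical helper and its loop body is illustrative only):

  /* Print every virtual use of STMT by walking the VUSE operands and,
     within each operand, the elements of its use vector.  */
  static void
  print_vuses (tree stmt)
  {
    struct vuse_optype_d *vuses;
    int i;

    for (vuses = VUSE_OPS (stmt); vuses; vuses = vuses->next)
      for (i = 0; i < VUSE_NUM (vuses); i++)
        print_generic_expr (stderr, VUSE_OP (vuses, i), 0);
  }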

2006-12-11  Aldy Hernandez  <aldyh@redhat.com>

	* tree-ssa-operands.c: Remove build_v_must_defs.
	(init_ssa_operands): Delete build_v_must_defs.
	(finalize_ssa_v_must_def_ops): Remove.
	(finalize_ssa_v_must_defs): Remove.
	(finalize_ssa_stmt_operands): Do not call
	finalize_ssa_v_must_defs.
	(start_ssa_stmt_operands): Do not check build_v_must_defs.
	(append_v_must_def): Delete.
	(copy_virtual_operands): Do not copy V_MUST_DEFs.
	(get_modify_expr_operands): Remove reference to V_MUST_DEF from
	comment.  Remove opf_kill_def.
	(build_ssa_operands): Remove references to v_must_defs.
	(copy_virtual_operands): Same.
	(fini_ssa_operands): Same.
	(free_ssa_operands): Same.
	(add_mustdef_op): Remove.
	Remove mustdef_optype_p.
	(alloc_mustdef): Remove.
	Remove references to V_MUST_DEFs in comment at top of file.
	(get_expr_operands): Remove opf_kill_def.
	(opf_kill_def): Remove.
	(add_virtual_operand): Remove opf_kill_def.
	(get_indirect_ref_operands): Same.
	(get_tmr_operands): Same.

	* tree-vectorizer.c (rename_variables_in_bb): Remove
	SSA_OP_ALL_KILLS.

	* tree-ssa-loop-manip.c (find_uses_to_rename_stmt): Remove
	SSA_OP_ALL_KILLS.
	(check_loop_closed_ssa_stmt): Same.

	* tree-ssa.c (verify_def): Remove V_MUST_DEF from comment.
	(verify_use): Same.
	(verify_ssa): Remove V_MUST_DEFs traces.
	(verify_ssa): Remove SSA_OP_ALL_KILLS.

	* tree-into-ssa.c (mark_def_sites): Change SSA_OP_VMUSTDEF to
	SSA_OP_VMAYDEF.
	(rewrite_update_stmt): Remove SSA_OP_VIRTUAL_KILLS.
	(rewrite_stmt): Remove SSA_OP_ALL_KILLS.

	* tree-ssa-operands.h (struct stmt_operands_d): Remove V_MUST_DEF
	references.
	(MUSTDEF_OPS): Remove.
	(SSA_OP_VMUSTDEF): Remove.
	(FOR_EACH_SSA_MUSTDEF_OPERAND): Remove.
	(struct mustdef_optype_d): Remove.
	Remove mustdef_optype_p.
	(struct stmt_operands_d): Remove mustdef_ops.
	(ssa_operand_iterator_d): Remove mustdefs and mustkills.
	(SSA_OP_VIRTUAL_DEFS): Remove SSA_OP_VMUSTDEF.
	(MUSTDEF_RESULT_PTR): Remove.
	(MUSTDEF_RESULT): Remove.
	(MUSTDEF_KILL_PTR): Remove.
	(MUSTDEF_KILL): Remove.
	(MUSTDEF_NUM): Remove.
	(MUSTDEF_VECT): Remove.
	(SSA_OP_VIRTUAL_KILLS): Remove.
	(SSA_OP_ALL_VIRTUALS): Remove SSA_OP_VIRTUAL_KILLS.
	(SSA_OP_VMUSTKILL): Remove.
	(SSA_OP_ALL_KILLS): Remove.
	(SSA_OP_ALL_OPERANDS): Remove SSA_OP_ALL_KILLS.

	* tree-flow-inline.h (op_iter_init_def): Remove
	SSA_OP_VIRTUAL_KILLS.
	(delink_stmt_imm_use): Remove SSA_OP_ALL_KILLS.

	* tree-ssa-pre.c (compute_rvuse_and_antic_safe): Remove
	SSA_OP_VIRTUAL_KILLS.

	* tree-ssa-loop-im.c (determine_max_movement): Remove
	SSA_OP_VIRTUAL_KILLS.
	(gather_mem_refs_stmt): Same.

	* tree-ssa-dce.c (mark_really_necessary_kill_operand_phis): Delete.
	(perform_tree_ssa_dce): Remove call to
	mark_really_necessary_kill_operand_phis.

	* tree-flow-inline.h (op_iter_init): Remove setting of mustdefs
	and mustkills.
	(op_iter_next_use): Do not check mustkills.
	(op_iter_next_def): Do not check mustdefs.
	(op_iter_next_tree): Do not check mustkills or mustdefs.
	(clear_and_done_ssa_iter): Do not set mustdefs or mustkills.
	(op_iter_next_maymustdef): Do not check mustkills.
	(op_iter_init_must_and_may_def): Remove SSA_OP_VMUSTKILL.
	(op_iter_init_mustdef): Remove.

	* tree-ssa-live.c (create_ssa_var_map): Change SSA_OP_VMUSTDEF to
	SSA_OP_VMAYDEF.

	* tree-ssa-dse.c (dse_optimize_stmt): Remove SSA_OP_VMUSTDEF.

	* tree-ssa-ccp.c: Remove V_MUST_DEF traces from comments.
	(visit_assignment): Same.

	* tree-ssa-copy.c (copy_prop_visit_assignment): Same.

	* tree-sra.c (mark_all_v_defs_1): Remove V_MUST_DEF from comment.

	* tree-outof-ssa.c (check_replaceable): Remove SSA_OP_VMUSTDEF.

	* tree-pretty-print.c (dump_vops): Remove printing of V_MUST_DEF.
	Remove kill_p variable.

	* tree-dfa.c (struct dfa_stats_d): Remove num_v_must_defs.
	(dump_dfa_stats): Remove code related to V_MUST_DEFs.
	(collect_dfa_stats_r): Do not set num_v_must_defs.
	(mark_new_vars_to_rename): Remove v_must_defs_{before,after}
	code.

	* tree-into-ssa.c (mark_def_sites): Change SSA_OP_VMUSTKILL to
	SSA_OP_VMAYUSE.

	* tree-ssa-pre.c (compute_rvuse_and_antic_safe): Remove
	SSA_OP_VMUSTDEF and SSA_OP_VMUSTKILL.

	* tree-ssa-propagate.c (stmt_makes_single_store): Remove
	SSA_OP_VMUSTDEF.
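
The user-visible effect on dumps (modeled on the doc/tree-ssa.texi and
testsuite hunks below; the fragments are illustrative, not taken verbatim
from the patch) is that killing definitions no longer get their own
operator.  A whole store to a non-register variable that used to print as

  # a = V_MUST_DEF <a>
  a = b;

now prints like any other virtual definition:

  # a = VDEF <a>
  a = b;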

From-SVN: r119760

@ -1837,7 +1837,7 @@ stor-layout.o : stor-layout.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
tree-ssa-structalias.o: tree-ssa-structalias.c tree-ssa-structalias.h \
$(SYSTEM_H) $(CONFIG_H) $(GGC_H) $(TREE_H) $(TREE_FLOW_H) \
$(TM_H) coretypes.h $(CGRAPH_H) tree-pass.h $(TIMEVAR_H) \
gt-tree-ssa-structalias.h $(PARAMS_H) $(ALIAS_H)
gt-tree-ssa-structalias.h $(PARAMS_H) $(ALIAS_H) pointer-set.h
tree-ssa.o : tree-ssa.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) output.h $(DIAGNOSTIC_H) \
toplev.h $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
@ -2036,7 +2036,7 @@ tree-ssa-alias.o : tree-ssa-alias.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(FUNCTION_H) $(TIMEVAR_H) convert.h $(TM_H) coretypes.h langhooks.h \
$(TREE_DUMP_H) tree-pass.h $(PARAMS_H) $(BASIC_BLOCK_H) $(DIAGNOSTIC_H) \
hard-reg-set.h $(TREE_GIMPLE_H) vec.h tree-ssa-structalias.h \
$(IPA_TYPE_ESCAPE_H) vecprim.h
$(IPA_TYPE_ESCAPE_H) vecprim.h pointer-set.h
tree-ssa-reassoc.o : tree-ssa-reassoc.c $(TREE_FLOW_H) $(CONFIG_H) \
$(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) errors.h $(TIMEVAR_H) \
$(TM_H) coretypes.h $(TREE_DUMP_H) tree-pass.h $(FLAGS_H) tree-iterator.h\


@ -6205,21 +6205,12 @@ Maximum number of basic blocks on path that cse considers. The default is 10.
@item max-cse-insns
The maximum instructions CSE process before flushing. The default is 1000.
@item global-var-threshold
Counts the number of function calls (@var{n}) and the number of
call-clobbered variables (@var{v}). If @var{n}x@var{v} is larger than this limit, a
single artificial variable will be created to represent all the
call-clobbered variables at function call sites. This artificial
variable will then be made to alias every call-clobbered variable.
(done as @code{int * size_t} on the host machine; beware overflow).
@item max-aliased-vops
Maximum number of virtual operands allowed to represent aliases
before triggering the alias grouping heuristic. Alias grouping
reduces compile times and memory consumption needed for aliasing at
the expense of precision loss in alias information.
Maximum number of virtual operands per statement allowed to represent
aliases before triggering the alias grouping heuristic. Alias
grouping reduces compile times and memory consumption needed for
aliasing at the expense of precision loss in alias information.
@item ggc-min-expand
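
Note that this commit also drops the default for max-aliased-vops from 500
to 10 (see the params.def hunk below); users who depended on the old
behaviour can still raise it per invocation with the usual --param syntax,
for example (illustrative command line, not part of the patch):

  gcc -O2 --param max-aliased-vops=500 foo.c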


@ -852,8 +852,8 @@ operands, use the @option{-vops} option to @option{-fdump-tree}:
p = &a;
else
p = &b;
# a = V_MAY_DEF <a>
# b = V_MAY_DEF <b>
# a = VDEF <a>
# b = VDEF <b>
*p = 5;
# VUSE <a>
@ -862,11 +862,11 @@ operands, use the @option{-vops} option to @option{-fdump-tree}:
@}
@end smallexample
Notice that @code{V_MAY_DEF} operands have two copies of the referenced
Notice that @code{VDEF} operands have two copies of the referenced
variable. This indicates that this is not a killing definition of
that variable. In this case we refer to it as a @dfn{may definition}
or @dfn{aliased store}. The presence of the second copy of the
variable in the @code{V_MAY_DEF} operand will become important when the
variable in the @code{VDEF} operand will become important when the
function is converted into SSA form. This will be used to link all
the non-killing definitions to prevent optimizations from making
incorrect assumptions about them.
@ -963,8 +963,8 @@ tree FOR_EACH_SSA_TREE_OPERAND
#define SSA_OP_USE 0x01 /* @r{Real USE operands.} */
#define SSA_OP_DEF 0x02 /* @r{Real DEF operands.} */
#define SSA_OP_VUSE 0x04 /* @r{VUSE operands.} */
#define SSA_OP_VMAYUSE 0x08 /* @r{USE portion of V_MAY_DEFS.} */
#define SSA_OP_VMAYDEF 0x10 /* @r{DEF portion of V_MAY_DEFS.} */
#define SSA_OP_VMAYUSE 0x08 /* @r{USE portion of VDEFS.} */
#define SSA_OP_VMAYDEF 0x10 /* @r{DEF portion of VDEFS.} */
#define SSA_OP_VMUSTDEF 0x20 /* @r{V_MUST_DEF definitions.} */
/* @r{These are commonly grouped operand flags.} */
@ -1004,12 +1004,12 @@ aren't using operand pointers, use and defs flags can be mixed.
@}
@end smallexample
@code{V_MAY_DEF}s are broken into two flags, one for the
@code{VDEF}s are broken into two flags, one for the
@code{DEF} portion (@code{SSA_OP_VMAYDEF}) and one for the USE portion
(@code{SSA_OP_VMAYUSE}). If all you want to look at are the
@code{V_MAY_DEF}s together, there is a fourth iterator macro for this,
@code{VDEF}s together, there is a fourth iterator macro for this,
which returns both a def_operand_p and a use_operand_p for each
@code{V_MAY_DEF} in the statement. Note that you don't need any flags for
@code{VDEF} in the statement. Note that you don't need any flags for
this one.
@smallexample
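
As a rough sketch of the split described above (the macro, flag and helper
names all appear elsewhere in this commit; the loop body itself is
illustrative only and assumes a statement stmt in scope):

  /* Visit only the USE portion of the VDEF (V_MAY_DEF) operands of
     STMT and print each virtual use.  */
  use_operand_p use_p;
  ssa_op_iter iter;

  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_VMAYUSE)
    print_generic_expr (stderr, get_use_from_ptr (use_p), 0);
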
@ -1400,21 +1400,34 @@ There are several @code{TODO} flags that control the behavior of
The virtual SSA form is harder to preserve than the non-virtual SSA form
mainly because the set of virtual operands for a statement may change at
what some would consider unexpected times. In general, any time you
have modified a statement that has virtual operands, you should verify
whether the list of virtual operands has changed, and if so, mark the
newly exposed symbols by calling @code{mark_new_vars_to_rename}.
what some would consider unexpected times. In general, statement
modifications should be bracketed between calls to
@code{push_stmt_changes} and @code{pop_stmt_changes}. For example,
There is one additional caveat to preserving virtual SSA form. When the
entire set of virtual operands may be eliminated due to better
disambiguation, a bare SMT will be added to the list of virtual
operands, to signify the non-visible aliases that the are still being
referenced. If the set of bare SMT's may change,
@code{TODO_update_smt_usage} should be added to the todo flags.
@smallexample
munge_stmt (tree stmt)
@{
push_stmt_changes (&stmt);
... rewrite STMT ...
pop_stmt_changes (&stmt);
@}
@end smallexample
With the current pruning code, this can only occur when constants are
propagated into array references that were previously non-constant, or
address expressions are propagated into their uses.
The call to @code{push_stmt_changes} saves the current state of the
statement operands and the call to @code{pop_stmt_changes} compares
the saved state with the current one and does the appropriate symbol
marking for the SSA renamer.
It is possible to modify several statements at a time, provided that
@code{push_stmt_changes} and @code{pop_stmt_changes} are called in
LIFO order, as when processing a stack of statements.
Additionally, if the pass discovers that it did not need to make
changes to the statement after calling @code{push_stmt_changes}, it
can simply discard the topmost change buffer by calling
@code{discard_stmt_changes}. This will avoid the expensive operand
re-scan operation and the buffer comparison that determines if symbols
need to be marked for renaming.
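
A minimal sketch of the protocol, including the early-out path
(maybe_munge_stmt is a hypothetical pass routine, not part of the patch):

  static void
  maybe_munge_stmt (tree stmt, bool rewrite_p)
  {
    push_stmt_changes (&stmt);

    if (!rewrite_p)
      {
        /* Nothing was modified: discard the topmost change buffer and
           skip the operand re-scan and comparison.  */
        discard_stmt_changes (&stmt);
        return;
      }

    /* ... rewrite STMT ...  */
    pop_stmt_changes (&stmt);
  }
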
@subsection Examining @code{SSA_NAME} nodes
@cindex examining SSA_NAMEs
@ -1635,11 +1648,11 @@ foo (int i)
p_6 = &b;
# p_1 = PHI <p_4(1), p_6(2)>;
# a_7 = V_MAY_DEF <a_3>;
# b_8 = V_MAY_DEF <b_5>;
# a_7 = VDEF <a_3>;
# b_8 = VDEF <b_5>;
*p_1 = 3;
# a_9 = V_MAY_DEF <a_7>
# a_9 = VDEF <a_7>
# VUSE <b_8>
a_9 = b_8 + 2;


@ -451,13 +451,6 @@ DEFPARAM(PARAM_VECT_MAX_VERSION_CHECKS,
"Bound on number of runtime checks inserted by the vectorizer's loop versioning",
6, 0, 0)
/* The product of the next two is used to decide whether or not to
use .GLOBAL_VAR. See tree-dfa.c. */
DEFPARAM(PARAM_GLOBAL_VAR_THRESHOLD,
"global-var-threshold",
"Given N calls and V call-clobbered vars in a function. Use .GLOBAL_VAR if NxV is larger than this limit",
500000, 0, 0)
DEFPARAM(PARAM_MAX_CSELIB_MEMORY_LOCATIONS,
"max-cselib-memory-locations",
"The maximum memory locations recorded by cselib",
@ -495,8 +488,8 @@ DEFPARAM(PARAM_MAX_RELOAD_SEARCH_INSNS,
DEFPARAM(PARAM_MAX_ALIASED_VOPS,
"max-aliased-vops",
"The maximum number of virtual operands allowed to represent aliases before triggering alias grouping",
500, 0, 0)
"The maximum number of virtual operators per statement allowed to represent aliases before triggering alias grouping",
10, 0, 0)
DEFPARAM(PARAM_MAX_SCHED_REGION_BLOCKS,
"max-sched-region-blocks",


@ -142,8 +142,6 @@ typedef enum compiler_param
PARAM_VALUE (PARAM_SMS_DFA_HISTORY)
#define SMS_LOOP_AVERAGE_COUNT_THRESHOLD \
PARAM_VALUE (PARAM_SMS_LOOP_AVERAGE_COUNT_THRESHOLD)
#define GLOBAL_VAR_THRESHOLD \
PARAM_VALUE (PARAM_GLOBAL_VAR_THRESHOLD)
#define MAX_ALIASED_VOPS \
PARAM_VALUE (PARAM_MAX_ALIASED_VOPS)
#define INTEGER_SHARE_LIMIT \


@ -1,3 +1,19 @@
2006-12-11 Aldy Hernandez <aldyh@redhat.com>
Diego Novillo <dnovillo@redhat.com>
* gcc.dg/tree-ssa/20040517-1.c: Adapt pattern to
VDEF/VUSE changes.
* gcc.dg/tree-ssa/pr26421.c: Likewise
* gcc.dg/tree-ssa/inline_asm-1.c: Likewise.
* gcc.dg/tree-ssa/pr23382.c: Likewise.
* gcc.dg/tree-ssa/inline_asm-2.c: Likewise.
* gcc.dg/tree-ssa/pr28410.c: Likewise.
* gcc.dg/tree-ssa/20031015-1.c: Likewise.
* gcc.dg/tree-ssa/20040302-1.c: Likewise.
* gcc.dg/tree-ssa/vrp07.c: Likewise.
* gcc.dg/tree-ssa/vrp08.c: Likewise.
* gcc.dg/tree-ssa/alias-12.c: Likewise.
2006-12-11 Jan Hubicka <jh@suse.cz>
* gcc.dg/tree-prof/stringop-1.c: New test.


@ -13,6 +13,6 @@ main(void)
return 0;
}
/* The V_*_DEF comes from the initial assignment and the asm. */
/* { dg-final { scan-tree-dump-times "_DEF" 2 "alias1" } } */
/* The VDEF comes from the initial assignment and the asm. */
/* { dg-final { scan-tree-dump-times "DEF" 2 "alias1" } } */
/* { dg-final { cleanup-tree-dump "alias1" } } */


@ -1,5 +1,5 @@
/* { dg-do compile } */
/* { dg-options "-O1 --param global-var-threshold=0" } */
/* { dg-options "-O1" } */
/* Test for .GLOBAL_VAR not being renamed into SSA after alias analysis.
provided by Dale Johannesen in PR 14266. */


@ -17,6 +17,5 @@ void bar (void)
malloc functions may clobber global memory. Only the function result
does not alias any other pointer.
Hence, we must have a VDEF for a before and after the call to foo(). */
/* { dg-final { scan-tree-dump-times "V_MAY_DEF" 1 "alias1"} } */
/* { dg-final { scan-tree-dump-times "V_MUST_DEF" 1 "alias1"} } */
/* { dg-final { scan-tree-dump-times "VDEF" 2 "alias1"} } */
/* { dg-final { cleanup-tree-dump "alias1" } } */


@ -13,6 +13,6 @@ int foo(int i)
return a.x[i];
}
/* { dg-final { scan-tree-dump "V_MAY_DEF" "alias1" } } */
/* { dg-final { scan-tree-dump "VDEF" "alias1" } } */
/* { dg-final { cleanup-tree-dump "alias1" } } */


@ -0,0 +1,12 @@
/* { dg-do compile } */
/* { dg-options "-O1 -fno-tree-dce -fdump-tree-optimized" } */
_Complex int t = 0;
int f(void)
{
t = 0;
__real__ t = 2;
__imag__ t = 2;
}
/* { dg-final { scan-tree-dump-times "__complex__" 0 "optimized" } } */
/* { dg-final { cleanup-tree-dump "optimized" } } */


@ -15,6 +15,6 @@ char f(char *a)
/* { dg-final { scan-tree-dump-times "test_function" 2 "optimized"} } */
/* { dg-final { cleanup-tree-dump "optimized" } } */
/* There should a V_MAY_DEF for the inline-asm. */
/* { dg-final { scan-tree-dump-times "V_MAY_DEF" 1 "alias1"} } */
/* There should a VDEF for the inline-asm. */
/* { dg-final { scan-tree-dump-times "VDEF" 1 "alias1"} } */
/* { dg-final { cleanup-tree-dump "alias1" } } */


@ -13,6 +13,6 @@ void f(char *a)
link_error ();
}
/* There should a V_MAY_DEF for the inline-asm and one for the link_error. */
/* { dg-final { scan-tree-dump-times "V_MAY_DEF" 2 "alias1"} } */
/* There should a VDEF for the inline-asm and one for the link_error. */
/* { dg-final { scan-tree-dump-times "VDEF" 2 "alias1"} } */
/* { dg-final { cleanup-tree-dump "alias1" } } */


@ -12,11 +12,11 @@ void f(void)
{
struct a *a = malloc(sizeof(struct a));
}
/* { dg-final { scan-tree-dump-times "V_MAY_DEF <HEAP" 1 "alias1"} } */
/* { dg-final { scan-tree-dump-times "V_MAY_DEF <HEAP" 1 "alias2"} } */
/* { dg-final { scan-tree-dump-times "V_MAY_DEF <HEAP" 1 "alias3"} } */
/* { dg-final { scan-tree-dump-times "V_MAY_DEF <HEAP" 1 "alias4"} } */
/* { dg-final { scan-tree-dump-times "V_MAY_DEF <HEAP" 1 "alias5"} } */
/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias1"} } */
/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias2"} } */
/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias3"} } */
/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias4"} } */
/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias5"} } */
/* { dg-final { cleanup-tree-dump "alias1" } } */
/* { dg-final { cleanup-tree-dump "alias2" } } */
/* { dg-final { cleanup-tree-dump "alias3" } } */


@ -16,6 +16,5 @@ int foo(void)
return a.i;
}
/* { dg-final { scan-tree-dump-times "V_MAY_DEF" 1 "alias1" } } */
/* { dg-final { scan-tree-dump-times "V_MUST_DEF" 1 "alias1" } } */
/* { dg-final { scan-tree-dump-times "VDEF" 2 "alias1" } } */
/* { dg-final { cleanup-tree-dump "alias1" } } */


@ -1,5 +1,5 @@
/* { dg-do run } */
/* { dg-options "-O2 --param global-var-threshold=1" } */
/* { dg-options "-O2" } */
extern void abort(void);
struct Bar { int p; };


@ -0,0 +1,13 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-dse1-vops" } */
struct { int a, b; } soup1, soup2;
foo ()
{
soup1 = soup2;
soup1.a = 66;
soup1.b = 77;
}
/* We should eliminate the first assignment. */
/* { dg-final { scan-tree-dump-times "VDEF" 2 "dse1"} } */


@ -32,5 +32,5 @@ foo (int i, int *p)
/* { dg-final { scan-tree-dump-times "Folding predicate p_.*to 1" 1 "vrp1" } } */
/* { dg-final { scan-tree-dump-times "Folding predicate p_.*to 0" 1 "vrp1" } } */
/* { dg-final { scan-tree-dump-times "PREDICATE: p_\[0-9\] ne_expr 0B" 2 "vrp1" } } */
/* { dg-final { scan-tree-dump-times "PREDICATE: p_\[0-9\]" 2 "vrp1" } } */
/* { dg-final { cleanup-tree-dump "vrp1" } } */


@ -19,5 +19,5 @@ foo (int a, int *p)
}
/* { dg-final { scan-tree-dump-times "Folding predicate p_.*to 1" 1 "vrp1" } } */
/* { dg-final { scan-tree-dump-times "PREDICATE: p_. ne_expr 0" 1 "vrp1" } } */
/* { dg-final { scan-tree-dump-times "PREDICATE: p_.* ne_expr 0" 1 "vrp1" } } */
/* { dg-final { cleanup-tree-dump "vrp1" } } */


@ -53,9 +53,5 @@ int main (void)
return main1 (x);
}
/* Currently the loops fail to vectorize due to aliasing problems.
If/when the aliasing problems are resolved, unalignment may
prevent vectorization on some targets. */
/* { dg-final { scan-tree-dump-times "vectorized 2 loops" 1 "vect" { xfail *-*-* } } } */
/* { dg-final { scan-tree-dump-times "can't determine dependence between" 2 "vect" } } */
/* { dg-final { scan-tree-dump-times "vectorized 2 loops" 1 "vect" } } */
/* { dg-final { cleanup-tree-dump "vect" } } */


@ -77,6 +77,7 @@ DEFTIMEVAR (TV_TREE_STORE_COPY_PROP , "tree store copy prop")
DEFTIMEVAR (TV_FIND_REFERENCED_VARS , "tree find ref. vars")
DEFTIMEVAR (TV_TREE_PTA , "tree PTA")
DEFTIMEVAR (TV_TREE_MAY_ALIAS , "tree alias analysis")
DEFTIMEVAR (TV_MEMORY_PARTITIONING , "tree memory partitioning")
DEFTIMEVAR (TV_TREE_INSERT_PHI_NODES , "tree PHI insertion")
DEFTIMEVAR (TV_TREE_SSA_REWRITE_BLOCKS, "tree SSA rewrite")
DEFTIMEVAR (TV_TREE_SSA_OTHER , "tree SSA other")


@ -2230,7 +2230,7 @@ find_case_label_for_value (tree switch_expr, tree val)
void
tree_dump_bb (basic_block bb, FILE *outf, int indent)
{
dump_generic_bb (outf, bb, indent, TDF_VOPS);
dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
}


@ -59,9 +59,8 @@ struct dfa_stats_d
long num_phis;
long num_phi_args;
int max_num_phi_args;
long num_v_may_defs;
long num_vdefs;
long num_vuses;
long num_v_must_defs;
};
@ -378,6 +377,21 @@ dump_variable (FILE *file, tree var)
dump_subvars_for (file, var);
}
if (!is_gimple_reg (var))
{
if (memory_partition (var))
{
fprintf (file, ", belongs to partition: ");
print_generic_expr (file, memory_partition (var), dump_flags);
}
if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
{
fprintf (file, ", partition symbols: ");
dump_decl_set (file, MPT_SYMBOLS (var));
}
}
fprintf (file, "\n");
}
@ -444,14 +458,9 @@ dump_dfa_stats (FILE *file)
fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
SCALE (size), LABEL (size));
size = dfa_stats.num_v_may_defs * sizeof (tree *);
size = dfa_stats.num_vdefs * sizeof (tree *);
total += size;
fprintf (file, fmt_str_1, "V_MAY_DEF operands", dfa_stats.num_v_may_defs,
SCALE (size), LABEL (size));
size = dfa_stats.num_v_must_defs * sizeof (tree *);
total += size;
fprintf (file, fmt_str_1, "V_MUST_DEF operands", dfa_stats.num_v_must_defs,
fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
SCALE (size), LABEL (size));
size = dfa_stats.num_phis * sizeof (struct tree_phi_node);
@ -546,10 +555,8 @@ collect_dfa_stats_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
dfa_stats_p->num_stmt_anns++;
dfa_stats_p->num_defs += NUM_SSA_OPERANDS (t, SSA_OP_DEF);
dfa_stats_p->num_uses += NUM_SSA_OPERANDS (t, SSA_OP_USE);
dfa_stats_p->num_v_may_defs += NUM_SSA_OPERANDS (t, SSA_OP_VMAYDEF);
dfa_stats_p->num_vdefs += NUM_SSA_OPERANDS (t, SSA_OP_VDEF);
dfa_stats_p->num_vuses += NUM_SSA_OPERANDS (t, SSA_OP_VUSE);
dfa_stats_p->num_v_must_defs +=
NUM_SSA_OPERANDS (t, SSA_OP_VMUSTDEF);
break;
}
@ -674,6 +681,7 @@ set_default_def (tree var, tree def)
gcc_assert (TREE_CODE (def) == SSA_NAME);
loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
DECL_UID (var), INSERT);
/* Default definition might be changed by tail call optimization. */
if (!*loc)
{


@ -789,6 +789,7 @@ static const struct dump_option_value_info dump_options[] =
{"lineno", TDF_LINENO},
{"uid", TDF_UID},
{"stmtaddr", TDF_STMTADDR},
{"memsyms", TDF_MEMSYMS},
{"all", ~(TDF_RAW | TDF_SLIM | TDF_LINENO | TDF_TREE | TDF_RTL | TDF_IPA
| TDF_STMTADDR | TDF_GRAPH)},
{NULL, 0}


@ -594,7 +594,7 @@ zero_imm_uses_p (tree var)
ssa_use_operand_t *ptr = &(SSA_NAME_IMM_USE_NODE (var));
return (ptr == ptr->next);
}
/* Return the tree pointer to by USE. */
static inline tree
get_use_from_ptr (use_operand_p use)
@ -693,9 +693,6 @@ set_is_used (tree var)
ann->used = 1;
}
/* ----------------------------------------------------------------------- */
/* Return true if T is an executable statement. */
static inline bool
is_exec_stmt (tree t)
@ -841,6 +838,63 @@ loop_containing_stmt (tree stmt)
return bb->loop_father;
}
/* Return the memory partition tag associated with symbol SYM. */
static inline tree
memory_partition (tree sym)
{
tree tag;
/* MPTs belong to their own partition. */
if (TREE_CODE (sym) == MEMORY_PARTITION_TAG)
return sym;
gcc_assert (!is_gimple_reg (sym));
tag = get_var_ann (sym)->mpt;
#if defined ENABLE_CHECKING
if (tag)
gcc_assert (TREE_CODE (tag) == MEMORY_PARTITION_TAG);
#endif
return tag;
}
/* Set MPT to be the memory partition associated with symbol SYM. */
static inline void
set_memory_partition (tree sym, tree mpt)
{
#if defined ENABLE_CHECKING
if (mpt)
gcc_assert (TREE_CODE (mpt) == MEMORY_PARTITION_TAG
&& !is_gimple_reg (sym));
#endif
var_ann (sym)->mpt = mpt;
if (mpt)
{
bitmap_set_bit (MPT_SYMBOLS (mpt), DECL_UID (sym));
/* MPT inherits the call-clobbering attributes from SYM. */
if (is_call_clobbered (sym))
{
MTAG_GLOBAL (mpt) = 1;
mark_call_clobbered (mpt, ESCAPE_IS_GLOBAL);
}
}
}
/* Return true if NAME is a memory factoring SSA name (i.e., an SSA
name for a memory partition).  */
static inline bool
factoring_name_p (tree name)
{
return TREE_CODE (SSA_NAME_VAR (name)) == MEMORY_PARTITION_TAG;
}
/* Return true if VAR is a clobbered by function calls. */
static inline bool
is_call_clobbered (tree var)
@ -874,16 +928,6 @@ clear_call_clobbered (tree var)
bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
}
/* Mark variable VAR as being non-addressable. */
static inline void
mark_non_addressable (tree var)
{
if (!MTAG_P (var))
DECL_CALL_CLOBBERED (var) = false;
bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
TREE_ADDRESSABLE (var) = 0;
}
/* Return the common annotation for T. Return NULL if the annotation
doesn't already exist. */
static inline tree_ann_common_t
@ -929,20 +973,22 @@ op_iter_next_use (ssa_op_iter *ptr)
}
if (ptr->vuses)
{
use_p = VUSE_OP_PTR (ptr->vuses);
ptr->vuses = ptr->vuses->next;
use_p = VUSE_OP_PTR (ptr->vuses, ptr->vuse_index);
if (++(ptr->vuse_index) >= VUSE_NUM (ptr->vuses))
{
ptr->vuse_index = 0;
ptr->vuses = ptr->vuses->next;
}
return use_p;
}
if (ptr->mayuses)
{
use_p = MAYDEF_OP_PTR (ptr->mayuses);
ptr->mayuses = ptr->mayuses->next;
return use_p;
}
if (ptr->mustkills)
{
use_p = MUSTDEF_KILL_PTR (ptr->mustkills);
ptr->mustkills = ptr->mustkills->next;
use_p = VDEF_OP_PTR (ptr->mayuses, ptr->mayuse_index);
if (++(ptr->mayuse_index) >= VDEF_NUM (ptr->mayuses))
{
ptr->mayuse_index = 0;
ptr->mayuses = ptr->mayuses->next;
}
return use_p;
}
if (ptr->phi_i < ptr->num_phi)
@ -967,16 +1013,10 @@ op_iter_next_def (ssa_op_iter *ptr)
ptr->defs = ptr->defs->next;
return def_p;
}
if (ptr->mustdefs)
if (ptr->vdefs)
{
def_p = MUSTDEF_RESULT_PTR (ptr->mustdefs);
ptr->mustdefs = ptr->mustdefs->next;
return def_p;
}
if (ptr->maydefs)
{
def_p = MAYDEF_RESULT_PTR (ptr->maydefs);
ptr->maydefs = ptr->maydefs->next;
def_p = VDEF_RESULT_PTR (ptr->vdefs);
ptr->vdefs = ptr->vdefs->next;
return def_p;
}
ptr->done = true;
@ -999,20 +1039,22 @@ op_iter_next_tree (ssa_op_iter *ptr)
}
if (ptr->vuses)
{
val = VUSE_OP (ptr->vuses);
ptr->vuses = ptr->vuses->next;
val = VUSE_OP (ptr->vuses, ptr->vuse_index);
if (++(ptr->vuse_index) >= VUSE_NUM (ptr->vuses))
{
ptr->vuse_index = 0;
ptr->vuses = ptr->vuses->next;
}
return val;
}
if (ptr->mayuses)
{
val = MAYDEF_OP (ptr->mayuses);
ptr->mayuses = ptr->mayuses->next;
return val;
}
if (ptr->mustkills)
{
val = MUSTDEF_KILL (ptr->mustkills);
ptr->mustkills = ptr->mustkills->next;
val = VDEF_OP (ptr->mayuses, ptr->mayuse_index);
if (++(ptr->mayuse_index) >= VDEF_NUM (ptr->mayuses))
{
ptr->mayuse_index = 0;
ptr->mayuses = ptr->mayuses->next;
}
return val;
}
if (ptr->defs)
@ -1021,16 +1063,10 @@ op_iter_next_tree (ssa_op_iter *ptr)
ptr->defs = ptr->defs->next;
return val;
}
if (ptr->mustdefs)
if (ptr->vdefs)
{
val = MUSTDEF_RESULT (ptr->mustdefs);
ptr->mustdefs = ptr->mustdefs->next;
return val;
}
if (ptr->maydefs)
{
val = MAYDEF_RESULT (ptr->maydefs);
ptr->maydefs = ptr->maydefs->next;
val = VDEF_RESULT (ptr->vdefs);
ptr->vdefs = ptr->vdefs->next;
return val;
}
@ -1050,15 +1086,15 @@ clear_and_done_ssa_iter (ssa_op_iter *ptr)
ptr->defs = NULL;
ptr->uses = NULL;
ptr->vuses = NULL;
ptr->maydefs = NULL;
ptr->vdefs = NULL;
ptr->mayuses = NULL;
ptr->mustdefs = NULL;
ptr->mustkills = NULL;
ptr->iter_type = ssa_op_iter_none;
ptr->phi_i = 0;
ptr->num_phi = 0;
ptr->phi_stmt = NULL_TREE;
ptr->done = true;
ptr->vuse_index = 0;
ptr->mayuse_index = 0;
}
/* Initialize the iterator PTR to the virtual defs in STMT. */
@ -1072,15 +1108,15 @@ op_iter_init (ssa_op_iter *ptr, tree stmt, int flags)
ptr->defs = (flags & SSA_OP_DEF) ? DEF_OPS (stmt) : NULL;
ptr->uses = (flags & SSA_OP_USE) ? USE_OPS (stmt) : NULL;
ptr->vuses = (flags & SSA_OP_VUSE) ? VUSE_OPS (stmt) : NULL;
ptr->maydefs = (flags & SSA_OP_VMAYDEF) ? MAYDEF_OPS (stmt) : NULL;
ptr->mayuses = (flags & SSA_OP_VMAYUSE) ? MAYDEF_OPS (stmt) : NULL;
ptr->mustdefs = (flags & SSA_OP_VMUSTDEF) ? MUSTDEF_OPS (stmt) : NULL;
ptr->mustkills = (flags & SSA_OP_VMUSTKILL) ? MUSTDEF_OPS (stmt) : NULL;
ptr->vdefs = (flags & SSA_OP_VDEF) ? VDEF_OPS (stmt) : NULL;
ptr->mayuses = (flags & SSA_OP_VMAYUSE) ? VDEF_OPS (stmt) : NULL;
ptr->done = false;
ptr->phi_i = 0;
ptr->num_phi = 0;
ptr->phi_stmt = NULL_TREE;
ptr->vuse_index = 0;
ptr->mayuse_index = 0;
}
/* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
@ -1099,7 +1135,7 @@ op_iter_init_use (ssa_op_iter *ptr, tree stmt, int flags)
static inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, tree stmt, int flags)
{
gcc_assert ((flags & (SSA_OP_ALL_USES | SSA_OP_VIRTUAL_KILLS)) == 0);
gcc_assert ((flags & SSA_OP_ALL_USES) == 0);
op_iter_init (ptr, stmt, flags);
ptr->iter_type = ssa_op_iter_def;
return op_iter_next_def (ptr);
@ -1118,73 +1154,53 @@ op_iter_init_tree (ssa_op_iter *ptr, tree stmt, int flags)
/* Get the next iterator mustdef value for PTR, returning the mustdef values in
KILL and DEF. */
static inline void
op_iter_next_maymustdef (use_operand_p *use, def_operand_p *def,
op_iter_next_vdef (vuse_vec_p *use, def_operand_p *def,
ssa_op_iter *ptr)
{
#ifdef ENABLE_CHECKING
gcc_assert (ptr->iter_type == ssa_op_iter_maymustdef);
gcc_assert (ptr->iter_type == ssa_op_iter_vdef);
#endif
if (ptr->mayuses)
{
*def = MAYDEF_RESULT_PTR (ptr->mayuses);
*use = MAYDEF_OP_PTR (ptr->mayuses);
*def = VDEF_RESULT_PTR (ptr->mayuses);
*use = VDEF_VECT (ptr->mayuses);
ptr->mayuses = ptr->mayuses->next;
return;
}
if (ptr->mustkills)
{
*def = MUSTDEF_RESULT_PTR (ptr->mustkills);
*use = MUSTDEF_KILL_PTR (ptr->mustkills);
ptr->mustkills = ptr->mustkills->next;
return;
}
*def = NULL_DEF_OPERAND_P;
*use = NULL_USE_OPERAND_P;
*use = NULL;
ptr->done = true;
return;
}
static inline void
op_iter_next_mustdef (use_operand_p *use, def_operand_p *def,
ssa_op_iter *ptr)
{
vuse_vec_p vp;
op_iter_next_vdef (&vp, def, ptr);
if (vp != NULL)
{
gcc_assert (VUSE_VECT_NUM_ELEM (*vp) == 1);
*use = VUSE_ELEMENT_PTR (*vp, 0);
}
else
*use = NULL_USE_OPERAND_P;
}
/* Initialize iterator PTR to the operands in STMT. Return the first operands
in USE and DEF. */
static inline void
op_iter_init_maydef (ssa_op_iter *ptr, tree stmt, use_operand_p *use,
op_iter_init_vdef (ssa_op_iter *ptr, tree stmt, vuse_vec_p *use,
def_operand_p *def)
{
gcc_assert (TREE_CODE (stmt) != PHI_NODE);
op_iter_init (ptr, stmt, SSA_OP_VMAYUSE);
ptr->iter_type = ssa_op_iter_maymustdef;
op_iter_next_maymustdef (use, def, ptr);
}
/* Initialize iterator PTR to the operands in STMT. Return the first operands
in KILL and DEF. */
static inline void
op_iter_init_mustdef (ssa_op_iter *ptr, tree stmt, use_operand_p *kill,
def_operand_p *def)
{
gcc_assert (TREE_CODE (stmt) != PHI_NODE);
op_iter_init (ptr, stmt, SSA_OP_VMUSTKILL);
ptr->iter_type = ssa_op_iter_maymustdef;
op_iter_next_maymustdef (kill, def, ptr);
}
/* Initialize iterator PTR to the operands in STMT. Return the first operands
in KILL and DEF. */
static inline void
op_iter_init_must_and_may_def (ssa_op_iter *ptr, tree stmt,
use_operand_p *kill, def_operand_p *def)
{
gcc_assert (TREE_CODE (stmt) != PHI_NODE);
op_iter_init (ptr, stmt, SSA_OP_VMUSTKILL|SSA_OP_VMAYUSE);
ptr->iter_type = ssa_op_iter_maymustdef;
op_iter_next_maymustdef (kill, def, ptr);
ptr->iter_type = ssa_op_iter_vdef;
op_iter_next_vdef (use, def, ptr);
}
@ -1277,8 +1293,7 @@ delink_stmt_imm_use (tree stmt)
use_operand_p use_p;
if (ssa_operands_active ())
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
(SSA_OP_ALL_USES | SSA_OP_ALL_KILLS))
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
delink_imm_use (use_p);
}


@ -45,12 +45,14 @@ typedef struct basic_block_def *basic_block;
struct gimple_df GTY(()) {
/* Array of all variables referenced in the function. */
htab_t GTY((param_is (struct int_tree_map))) referenced_vars;
/* A list of all the noreturn calls passed to modify_stmt.
cleanup_control_flow uses it to detect cases where a mid-block
indirect call has been turned into a noreturn call. When this
happens, all the instructions after the call are no longer
reachable and must be deleted as dead. */
VEC(tree,gc) *modified_noreturn_calls;
/* Array of all SSA_NAMEs used in the function. */
VEC(tree,gc) *ssa_names;
@ -234,23 +236,31 @@ struct var_ann_d GTY(())
ENUM_BITFIELD (need_phi_state) need_phi_state : 2;
/* Used during operand processing to determine if this variable is already
in the vuse list. */
in the VUSE list. */
unsigned in_vuse_list : 1;
/* Used during operand processing to determine if this variable is already
in the v_may_def list. */
unsigned in_v_may_def_list : 1;
in the VDEF list. */
unsigned in_vdef_list : 1;
/* True for HEAP and PARM_NOALIAS artificial variables. */
unsigned is_heapvar : 1;
/* An artificial variable representing the memory location pointed-to by
all the pointer symbols that flow-insensitive alias analysis
(mostly type-based) considers to be aliased. If the variable is
not a pointer or if it is never dereferenced, this must be NULL. */
/* Memory partition tag assigned to this symbol. */
tree mpt;
/* If this variable is a pointer P that has been dereferenced, this
field is an artificial variable that represents the memory
location *P. Every other pointer Q that is type-compatible with
P will also have the same memory tag. If the variable is not a
pointer or if it is never dereferenced, this must be NULL.
FIXME, do we really need this here? How much slower would it be
to convert to hash table? */
tree symbol_mem_tag;
/* Variables that may alias this variable. */
/* Variables that may alias this variable. This may only be set on
memory tags (NAME_MEMORY_TAG or TYPE_MEMORY_TAG). FIXME, move to
struct tree_memory_tag. */
VEC(tree, gc) *may_aliases;
/* Used when going out of SSA form to indicate which partition this
@ -357,6 +367,10 @@ struct stmt_ann_d GTY(())
and local addressable variables. */
unsigned makes_clobbering_call : 1;
/* Nonzero if the statement references memory (at least one of its
expressions contains a non-register operand). */
unsigned references_memory : 1;
/* Basic block that contains this statement. */
basic_block bb;
@ -719,6 +733,7 @@ static inline bool var_can_have_subvars (tree);
static inline bool overlap_subvar (unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
tree, bool *);
extern tree create_tag_raw (enum tree_code, tree, const char *);
/* Call-back function for walk_use_def_chains(). At each reaching
definition, a function with this prototype is called. */


@ -93,7 +93,7 @@ is_gimple_reg_rhs (tree t)
variable is only modified if evaluation of the RHS does not throw.
Don't force a temp of a non-renamable type; the copy could be
arbitrarily expensive. Instead we will generate a V_MAY_DEF for
arbitrarily expensive. Instead we will generate a VDEF for
the assignment. */
if (is_gimple_reg_type (TREE_TYPE (t))
@ -377,7 +377,7 @@ is_gimple_val (tree t)
/* FIXME make these decls. That can happen only when we expose the
entire landing-pad construct at the tree level. */
if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
return 1;
return true;
return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

File diff suppressed because it is too large.


@ -68,6 +68,8 @@ enum tree_dump_index
#define TDF_STMTADDR (1 << 12) /* Address of stmt. */
#define TDF_GRAPH (1 << 13) /* a graph dump is being emitted */
#define TDF_MEMSYMS (1 << 14) /* display memory symbols in expr.
Implies TDF_VOPS. */
extern char *get_dump_file_name (enum tree_dump_index);
extern int dump_enabled_p (enum tree_dump_index);


@ -87,14 +87,14 @@ do_niy (pretty_printer *buffer, tree node)
void
debug_generic_expr (tree t)
{
print_generic_expr (stderr, t, TDF_VOPS|TDF_UID);
print_generic_expr (stderr, t, TDF_VOPS|TDF_MEMSYMS);
fprintf (stderr, "\n");
}
void
debug_generic_stmt (tree t)
{
print_generic_stmt (stderr, t, TDF_VOPS|TDF_UID);
print_generic_stmt (stderr, t, TDF_VOPS|TDF_MEMSYMS);
fprintf (stderr, "\n");
}
@ -103,7 +103,7 @@ debug_tree_chain (tree t)
{
while (t)
{
print_generic_expr (stderr, t, TDF_VOPS|TDF_UID);
print_generic_expr (stderr, t, TDF_VOPS|TDF_MEMSYMS|TDF_UID);
fprintf (stderr, " ");
t = TREE_CHAIN (t);
}
@ -402,6 +402,33 @@ dump_omp_clauses (pretty_printer *buffer, tree clause, int spc, int flags)
}
/* Dump the set of decls SYMS. BUFFER, SPC and FLAGS are as in
dump_generic_node. */
static void
dump_symbols (pretty_printer *buffer, bitmap syms, int flags)
{
unsigned i;
bitmap_iterator bi;
if (syms == NULL)
pp_string (buffer, "NIL");
else
{
pp_string (buffer, " { ");
EXECUTE_IF_SET_IN_BITMAP (syms, 0, i, bi)
{
tree sym = referenced_var_lookup (i);
dump_generic_node (buffer, sym, 0, flags, false);
pp_string (buffer, " ");
}
pp_string (buffer, "}");
}
}
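For reference, a sketch of the output this routine produces for a hypothetical set containing symbols a, b and c; a null set prints as NIL:

   { a b c }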
/* Dump the node NODE on the pretty_printer BUFFER, SPC spaces of indent.
FLAGS specifies details to show in the dump (see TDF_* in tree.h). If
IS_STMT is true, the object printed is considered to be a statement
@ -427,7 +454,7 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
if you call it on something with a non-stmt annotation attached. */
if (TREE_CODE (node) != ERROR_MARK
&& is_gimple_stmt (node)
&& (flags & TDF_VOPS)
&& (flags & (TDF_VOPS|TDF_MEMSYMS))
&& has_stmt_ann (node)
&& TREE_CODE (node) != PHI_NODE)
dump_vops (buffer, node, spc, flags);
@ -855,6 +882,7 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
case PARM_DECL:
case FIELD_DECL:
case NAMESPACE_DECL:
case MEMORY_PARTITION_TAG:
dump_decl_name (buffer, node, flags);
break;
@ -1626,7 +1654,10 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
if (i < PHI_NUM_ARGS (node) - 1)
pp_string (buffer, ", ");
}
pp_string (buffer, ">;");
pp_string (buffer, ">");
if (stmt_references_memory_p (node) && (flags & TDF_MEMSYMS))
dump_symbols (buffer, STORED_SYMS (node), flags);
}
break;
@ -1636,6 +1667,8 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
pp_decimal_int (buffer, SSA_NAME_VERSION (node));
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
pp_string (buffer, "(ab)");
else if (SSA_NAME_IS_DEFAULT_DEF (node))
pp_string (buffer, "(D)");
break;
case WITH_SIZE_EXPR:
@ -2654,51 +2687,89 @@ newline_and_indent (pretty_printer *buffer, int spc)
INDENT (spc);
}
static void
dump_vops (pretty_printer *buffer, tree stmt, int spc, int flags)
{
tree use;
use_operand_p use_p;
def_operand_p def_p;
use_operand_p kill_p;
ssa_op_iter iter;
struct vdef_optype_d *vdefs;
struct vuse_optype_d *vuses;
int i, n;
if (!ssa_operands_active ())
if (!ssa_operands_active () || !stmt_references_memory_p (stmt))
return;
FOR_EACH_SSA_MAYDEF_OPERAND (def_p, use_p, stmt, iter)
/* Even if the statement doesn't have virtual operators yet, it may
contain symbol information (this happens before aliases have been
computed). */
if ((flags & TDF_MEMSYMS)
&& VUSE_OPS (stmt) == NULL
&& VDEF_OPS (stmt) == NULL)
{
pp_string (buffer, "# ");
dump_generic_node (buffer, DEF_FROM_PTR (def_p),
spc + 2, flags, false);
pp_string (buffer, " = V_MAY_DEF <");
dump_generic_node (buffer, USE_FROM_PTR (use_p),
spc + 2, flags, false);
pp_string (buffer, ">;");
newline_and_indent (buffer, spc);
if (LOADED_SYMS (stmt))
{
pp_string (buffer, "# LOADS: ");
dump_symbols (buffer, LOADED_SYMS (stmt), flags);
newline_and_indent (buffer, spc);
}
if (STORED_SYMS (stmt))
{
pp_string (buffer, "# STORES: ");
dump_symbols (buffer, STORED_SYMS (stmt), flags);
newline_and_indent (buffer, spc);
}
return;
}
FOR_EACH_SSA_MUSTDEF_OPERAND (def_p, kill_p, stmt, iter)
vuses = VUSE_OPS (stmt);
while (vuses)
{
pp_string (buffer, "# ");
dump_generic_node (buffer, DEF_FROM_PTR (def_p),
spc + 2, flags, false);
pp_string (buffer, " = V_MUST_DEF <");
dump_generic_node (buffer, USE_FROM_PTR (kill_p),
spc + 2, flags, false);
pp_string (buffer, ">;");
pp_string (buffer, "# VUSE <");
n = VUSE_NUM (vuses);
for (i = 0; i < n; i++)
{
dump_generic_node (buffer, VUSE_OP (vuses, i), spc + 2, flags, false);
if (i < n - 1)
pp_string (buffer, ", ");
}
pp_string (buffer, ">");
if (flags & TDF_MEMSYMS)
dump_symbols (buffer, LOADED_SYMS (stmt), flags);
newline_and_indent (buffer, spc);
vuses = vuses->next;
}
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_VUSE)
vdefs = VDEF_OPS (stmt);
while (vdefs)
{
pp_string (buffer, "# VUSE <");
dump_generic_node (buffer, use, spc + 2, flags, false);
pp_string (buffer, ">;");
pp_string (buffer, "# ");
dump_generic_node (buffer, VDEF_RESULT (vdefs), spc + 2, flags, false);
pp_string (buffer, " = VDEF <");
n = VDEF_NUM (vdefs);
for (i = 0; i < n; i++)
{
dump_generic_node (buffer, VDEF_OP (vdefs, i), spc + 2, flags, 0);
if (i < n - 1)
pp_string (buffer, ", ");
}
pp_string (buffer, ">");
if ((flags & TDF_MEMSYMS) && vdefs->next == NULL)
dump_symbols (buffer, STORED_SYMS (stmt), flags);
newline_and_indent (buffer, spc);
vdefs = vdefs->next;
}
}
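With the new operand format, an aliased load/store pair would be dumped roughly as below (hypothetical symbol 'a' and SSA versions; the trailing symbol sets appear only when TDF_MEMSYMS is given):

  # VUSE <a_4> { a }
  t_7 = a.b;

  # a_5 = VDEF <a_4> { a }
  a.b = t_7 + 1;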
/* Dumps basic block BB to FILE with details described by FLAGS and
indented by INDENT spaces. */
@ -2807,8 +2878,8 @@ dump_bb_end (pretty_printer *buffer, basic_block bb, int indent, int flags)
pp_newline (buffer);
}
/* Dumps phi nodes of basic block BB to buffer BUFFER with details described by
FLAGS indented by INDENT spaces. */
/* Dump PHI nodes of basic block BB to BUFFER with details described
by FLAGS and indented by INDENT spaces. */
static void
dump_phi_nodes (pretty_printer *buffer, basic_block bb, int indent, int flags)
@ -2829,6 +2900,7 @@ dump_phi_nodes (pretty_printer *buffer, basic_block bb, int indent, int flags)
}
}
/* Dump jump to basic block BB that is represented implicitly in the cfg
to BUFFER. */


@ -1563,8 +1563,9 @@ decide_instantiations (void)
/* Phase Four: Update the function to match the replacements created. */
/* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
renaming. This becomes necessary when we modify all of a non-scalar. */
/* Mark all the variables in VDEF/VUSE operators for STMT for
renaming. This becomes necessary when we modify all of a
non-scalar. */
static void
mark_all_v_defs_1 (tree stmt)
@ -1599,6 +1600,7 @@ mark_all_v_defs (tree list)
}
}
/* Mark every replacement under ELT with TREE_NO_WARNING. */
static void
@ -2358,8 +2360,9 @@ struct tree_opt_pass pass_sra =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func /* todo_flags_finish */
TODO_dump_func
| TODO_update_ssa
| TODO_ggc_collect | TODO_verify_ssa,
| TODO_ggc_collect
| TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};

File diff suppressed because it is too large.


@ -132,13 +132,12 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
We should be able to deduce that the predicate 'a.a != B' is always
false. To achieve this, we associate constant values to the SSA
names in the V_MAY_DEF and V_MUST_DEF operands for each store.
Additionally, since we also glob partial loads/stores with the base
symbol, we also keep track of the memory reference where the
constant value was stored (in the MEM_REF field of PROP_VALUE_T).
For instance,
names in the VDEF operands for each store. Additionally,
since we also glob partial loads/stores with the base symbol, we
also keep track of the memory reference where the constant value
was stored (in the MEM_REF field of PROP_VALUE_T). For instance,
# a_5 = V_MAY_DEF <a_4>
# a_5 = VDEF <a_4>
a.a = 2;
# VUSE <a_5>
@ -222,9 +221,9 @@ typedef enum
/* Array of propagated constant values. After propagation,
CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I). If
the constant is held in an SSA name representing a memory store
(i.e., a V_MAY_DEF or V_MUST_DEF), CONST_VAL[I].MEM_REF will
contain the actual memory reference used to store (i.e., the LHS of
the assignment doing the store). */
(i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
memory reference used to store (i.e., the LHS of the assignment
doing the store). */
static prop_value_t *const_val;
/* True if we are also propagating constants in stores and loads. */
@ -1274,9 +1273,9 @@ visit_assignment (tree stmt, tree *output_p)
}
else if (do_store_ccp && stmt_makes_single_store (stmt))
{
/* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
to the new constant value and mark the LHS as the memory
reference associated with VAL. */
/* Otherwise, set the names in VDEF operands to the new
constant value and mark the LHS as the memory reference
associated with VAL. */
ssa_op_iter i;
tree vdef;
bool changed;


@ -1073,8 +1073,7 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
bitmap_set_bit (used_in_real_ops, DECL_UID (SSA_NAME_VAR (var)));
/* Validate that virtual ops don't get used in funny ways. */
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
SSA_OP_VIRTUAL_USES | SSA_OP_VMUSTDEF)
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_VIRTUALS)
{
bitmap_set_bit (used_in_virtual_ops,
DECL_UID (SSA_NAME_VAR (var)));


@ -63,6 +63,24 @@ may_propagate_copy (tree dest, tree orig)
tree type_d = TREE_TYPE (dest);
tree type_o = TREE_TYPE (orig);
/* For memory partitions, copies are OK as long as the memory symbol
belongs to the partition. */
if (TREE_CODE (dest) == SSA_NAME
&& TREE_CODE (SSA_NAME_VAR (dest)) == MEMORY_PARTITION_TAG)
return (TREE_CODE (orig) == SSA_NAME
&& !is_gimple_reg (orig)
&& (bitmap_bit_p (MPT_SYMBOLS (SSA_NAME_VAR (dest)),
DECL_UID (SSA_NAME_VAR (orig)))
|| SSA_NAME_VAR (dest) == SSA_NAME_VAR (orig)));
if (TREE_CODE (orig) == SSA_NAME
&& TREE_CODE (SSA_NAME_VAR (orig)) == MEMORY_PARTITION_TAG)
return (TREE_CODE (dest) == SSA_NAME
&& !is_gimple_reg (dest)
&& (bitmap_bit_p (MPT_SYMBOLS (SSA_NAME_VAR (orig)),
DECL_UID (SSA_NAME_VAR (dest)))
|| SSA_NAME_VAR (dest) == SSA_NAME_VAR (orig)));
/* Do not copy between types for which we *do* need a conversion. */
if (!tree_ssa_useless_type_conversion_1 (type_d, type_o))
return false;
@ -108,8 +126,8 @@ may_propagate_copy (tree dest, tree orig)
&& POINTER_TYPE_P (type_d)
&& POINTER_TYPE_P (type_o))
{
tree mt_dest = var_ann (SSA_NAME_VAR (dest))->symbol_mem_tag;
tree mt_orig = var_ann (SSA_NAME_VAR (orig))->symbol_mem_tag;
tree mt_dest = symbol_mem_tag (SSA_NAME_VAR (dest));
tree mt_orig = symbol_mem_tag (SSA_NAME_VAR (orig));
if (mt_dest && mt_orig && mt_dest != mt_orig)
return false;
else if (!lang_hooks.types_compatible_p (type_d, type_o))
@ -188,6 +206,18 @@ merge_alias_info (tree orig, tree new)
var_ann_t new_ann = var_ann (new_sym);
var_ann_t orig_ann = var_ann (orig_sym);
/* No merging necessary when memory partitions are involved. */
if (factoring_name_p (new))
{
gcc_assert (!is_gimple_reg (orig_sym));
return;
}
else if (factoring_name_p (orig))
{
gcc_assert (!is_gimple_reg (new_sym));
return;
}
gcc_assert (POINTER_TYPE_P (TREE_TYPE (orig)));
gcc_assert (POINTER_TYPE_P (TREE_TYPE (new)));
@ -545,7 +575,7 @@ dump_copy_of (FILE *file, tree var)
/* Evaluate the RHS of STMT. If it produces a valid copy, set the LHS
value and store the LHS into *RESULT_P. If STMT generates more
than one name (i.e., STMT is an aliased store), it is enough to
store the first name in the V_MAY_DEF list into *RESULT_P. After
store the first name in the VDEF list into *RESULT_P. After
all, the names generated will be VUSEd in the same statements. */
static enum ssa_prop_result
@ -582,8 +612,8 @@ copy_prop_visit_assignment (tree stmt, tree *result_p)
}
else if (stmt_makes_single_store (stmt))
{
/* Otherwise, set the names in V_MAY_DEF/V_MUST_DEF operands
to be a copy of RHS. */
/* Otherwise, set the names in VDEF operands to be a copy
of RHS. */
ssa_op_iter i;
tree vdef;
bool changed;


@ -222,11 +222,11 @@ mark_stmt_necessary (tree stmt, bool add_to_worklist)
VEC_safe_push (tree, heap, worklist, stmt);
}
/* Mark the statement defining operand OP as necessary. PHIONLY is true
if we should only mark it necessary if it is a phi node. */
/* Mark the statement defining operand OP as necessary. */
static inline void
mark_operand_necessary (tree op, bool phionly)
mark_operand_necessary (tree op)
{
tree stmt;
int ver;
@ -241,9 +241,7 @@ mark_operand_necessary (tree op, bool phionly)
stmt = SSA_NAME_DEF_STMT (op);
gcc_assert (stmt);
if (NECESSARY (stmt)
|| IS_EMPTY_STMT (stmt)
|| (phionly && TREE_CODE (stmt) != PHI_NODE))
if (NECESSARY (stmt) || IS_EMPTY_STMT (stmt))
return;
NECESSARY (stmt) = 1;
@ -489,7 +487,7 @@ propagate_necessity (struct edge_list *el)
{
tree arg = PHI_ARG_DEF (stmt, k);
if (TREE_CODE (arg) == SSA_NAME)
mark_operand_necessary (arg, false);
mark_operand_necessary (arg);
}
if (aggressive)
@ -509,87 +507,22 @@ propagate_necessity (struct edge_list *el)
else
{
/* Propagate through the operands. Examine all the USE, VUSE and
V_MAY_DEF operands in this statement. Mark all the statements
which feed this statement's uses as necessary. */
VDEF operands in this statement. Mark all the statements
which feed this statement's uses as necessary. The
operands of VDEF expressions are also needed as they
represent potential definitions that may reach this
statement (VDEF operands allow us to follow def-def
links). */
ssa_op_iter iter;
tree use;
/* The operands of V_MAY_DEF expressions are also needed as they
represent potential definitions that may reach this
statement (V_MAY_DEF operands allow us to follow def-def
links). */
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
mark_operand_necessary (use, false);
mark_operand_necessary (use);
}
}
}
/* Propagate necessity around virtual phi nodes used in kill operands.
The reason this isn't done during propagate_necessity is because we don't
want to keep phis around that are just there for must-defs, unless we
absolutely have to. After we've rewritten the reaching definitions to be
correct in the previous part of the fixup routine, we can simply propagate
around the information about which of these virtual phi nodes are really
used, and set the NECESSARY flag accordingly.
Note that we do the minimum here to ensure that we keep alive the phis that
are actually used in the corrected SSA form. In particular, some of these
phis may now have all of the same operand, and will be deleted by some
other pass. */
static void
mark_really_necessary_kill_operand_phis (void)
{
basic_block bb;
int i;
/* Seed the worklist with the new virtual phi arguments and virtual
uses */
FOR_EACH_BB (bb)
{
block_stmt_iterator bsi;
tree phi;
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
if (!is_gimple_reg (PHI_RESULT (phi)) && NECESSARY (phi))
{
for (i = 0; i < PHI_NUM_ARGS (phi); i++)
mark_operand_necessary (PHI_ARG_DEF (phi, i), true);
}
}
for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
{
tree stmt = bsi_stmt (bsi);
if (NECESSARY (stmt))
{
use_operand_p use_p;
ssa_op_iter iter;
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
{
tree use = USE_FROM_PTR (use_p);
mark_operand_necessary (use, true);
}
}
}
}
/* Mark all virtual phis still in use as necessary, and all of their
arguments that are phis as necessary. */
while (VEC_length (tree, worklist) > 0)
{
tree use = VEC_pop (tree, worklist);
for (i = 0; i < PHI_NUM_ARGS (use); i++)
mark_operand_necessary (PHI_ARG_DEF (use, i), true);
}
}
/* Remove dead PHI nodes from block BB. */
static void
@ -634,9 +567,6 @@ static void
remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
{
tree t = bsi_stmt (*i);
def_operand_p def_p;
ssa_op_iter iter;
if (dump_file && (dump_flags & TDF_DETAILS))
{
@ -711,11 +641,6 @@ remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
}
}
FOR_EACH_SSA_DEF_OPERAND (def_p, t, iter, SSA_OP_VIRTUAL_DEFS)
{
tree def = DEF_FROM_PTR (def_p);
mark_sym_for_renaming (SSA_NAME_VAR (def));
}
bsi_remove (i, true);
release_defs (t);
}
@ -875,7 +800,6 @@ perform_tree_ssa_dce (bool aggressive)
propagate_necessity (el);
mark_really_necessary_kill_operand_phis ();
eliminate_unnecessary_stmts ();
if (aggressive)


@ -1486,7 +1486,7 @@ eliminate_redundant_computations (tree stmt)
if (! def
|| TREE_CODE (def) != SSA_NAME
|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
|| !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF)
|| !ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF)
/* Do not record equivalences for increments of ivs. This would create
overlapping live ranges for a very questionable gain. */
|| simple_iv_increment_p (stmt))


@ -1,5 +1,5 @@
/* Dead store elimination
Copyright (C) 2004, 2005 Free Software Foundation, Inc.
Copyright (C) 2004, 2005, 2006 Free Software Foundation, Inc.
This file is part of GCC.
@ -34,6 +34,8 @@ Boston, MA 02110-1301, USA. */
#include "tree-dump.h"
#include "domwalk.h"
#include "flags.h"
#include "hashtab.h"
#include "sbitmap.h"
/* This file implements dead store elimination.
@ -65,6 +67,26 @@ Boston, MA 02110-1301, USA. */
the CFG. */
/* Given an aggregate, this records the parts of it which have been
stored into. */
struct aggregate_vardecl_d
{
/* The aggregate. */
tree decl;
/* Some aggregates are too big for us to handle or never get stored
to as a whole. If this field is TRUE, we don't care about this
aggregate. */
bool ignore;
/* Number of parts in the whole. */
unsigned nparts;
/* A bitmap of parts of the aggregate that have been set. If part N
of an aggregate has been stored to, bit N should be on. */
sbitmap parts_set;
};
struct dse_global_data
{
/* This is the global bitmap for store statements.
@ -73,6 +95,10 @@ struct dse_global_data
that we want to record, set the bit corresponding to the statement's
unique ID in this bitmap. */
bitmap stores;
/* A hash table containing the parts of an aggregate which have been
stored to. */
htab_t aggregate_vardecl;
};
/* We allocate a bitmap-per-block for stores which are encountered
@ -101,6 +127,7 @@ static void dse_optimize_stmt (struct dom_walk_data *,
static void dse_record_phis (struct dom_walk_data *, basic_block);
static void dse_finalize_block (struct dom_walk_data *, basic_block);
static void record_voperand_set (bitmap, bitmap *, unsigned int);
static void dse_record_partial_aggregate_store (tree, struct dse_global_data *);
static unsigned max_stmt_uid; /* Maximal uid of a statement. Uids to phi
nodes are assigned using the versions of
@ -173,7 +200,7 @@ memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
/* If we've found a default definition, then there's no problem. Both
stores will post-dominate it. And def_bb will be NULL. */
if (expr == gimple_default_def (cfun, SSA_NAME_VAR (expr)))
if (SSA_NAME_IS_DEFAULT_DEF (expr))
return NULL_TREE;
def_stmt = SSA_NAME_DEF_STMT (expr);
@ -210,6 +237,288 @@ memory_address_same (tree store1, tree store2)
== NULL);
}
/* A helper of dse_optimize_stmt.
Given a GIMPLE_MODIFY_STMT in STMT, check that each VDEF has one
use, and that one use is another VDEF clobbering the first one.
Return TRUE if the above conditions are met, otherwise FALSE. */
static bool
dse_possible_dead_store_p (tree stmt,
use_operand_p *first_use_p,
use_operand_p *use_p,
tree *use_stmt,
struct dse_global_data *dse_gd,
struct dse_block_local_data *bd)
{
ssa_op_iter op_iter;
bool fail = false;
def_operand_p var1;
vuse_vec_p vv;
tree defvar = NULL_TREE, temp;
tree prev_defvar = NULL_TREE;
stmt_ann_t ann = stmt_ann (stmt);
/* We want to verify that each virtual definition in STMT has
precisely one use and that all the virtual definitions are
used by the same single statement. When complete, we
want USE_STMT to refer to the one statement which uses
all of the virtual definitions from STMT. */
*use_stmt = NULL;
FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
{
defvar = DEF_FROM_PTR (var1);
/* If this virtual def does not have precisely one use, then
we will not be able to eliminate STMT. */
if (!has_single_use (defvar))
{
fail = true;
break;
}
/* Get the one and only immediate use of DEFVAR. */
single_imm_use (defvar, use_p, &temp);
gcc_assert (*use_p != NULL_USE_OPERAND_P);
*first_use_p = *use_p;
/* If the immediate use of DEF_VAR is not the same as the
previously found immediate uses, then we will not be able
to eliminate STMT. */
if (*use_stmt == NULL)
{
*use_stmt = temp;
prev_defvar = defvar;
}
else if (temp != *use_stmt)
{
/* The immediate use and the previously found immediate use
must be the same, except... if they're uses of different
parts of the whole. */
if (TREE_CODE (defvar) == SSA_NAME
&& TREE_CODE (SSA_NAME_VAR (defvar)) == STRUCT_FIELD_TAG
&& TREE_CODE (prev_defvar) == SSA_NAME
&& TREE_CODE (SSA_NAME_VAR (prev_defvar)) == STRUCT_FIELD_TAG
&& (SFT_PARENT_VAR (SSA_NAME_VAR (defvar))
== SFT_PARENT_VAR (SSA_NAME_VAR (prev_defvar))))
;
else
{
fail = true;
break;
}
}
}
if (fail)
{
record_voperand_set (dse_gd->stores, &bd->stores, ann->uid);
dse_record_partial_aggregate_store (stmt, dse_gd);
return false;
}
/* Skip through any PHI nodes we have already seen if the PHI
represents the only use of this store.
Note this does not handle the case where the store has
multiple VDEFs which all reach a set of PHI nodes in the same block. */
while (*use_p != NULL_USE_OPERAND_P
&& TREE_CODE (*use_stmt) == PHI_NODE
&& bitmap_bit_p (dse_gd->stores, get_stmt_uid (*use_stmt)))
{
/* A PHI node can both define and use the same SSA_NAME if
the PHI is at the top of a loop and the PHI_RESULT is
a loop invariant and copies have not been fully propagated.
The safe thing to do is exit assuming no optimization is
possible. */
if (SSA_NAME_DEF_STMT (PHI_RESULT (*use_stmt)) == *use_stmt)
return false;
/* Skip past this PHI and loop again in case we had a PHI
chain. */
single_imm_use (PHI_RESULT (*use_stmt), use_p, use_stmt);
}
return true;
}
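A sketch of the pattern this predicate accepts (names are hypothetical): a store whose single VDEF use is a later store to the same location, which makes the earlier store a candidate for elimination:

  # a_5 = VDEF <a_4>
  a.x = 3;              <-- candidate dead store; a_5 has exactly one use
  ...
  # a_6 = VDEF <a_5>
  a.x = 4;              <-- that single use, returned in *USE_STMT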
/* Given a DECL, return its AGGREGATE_VARDECL_D entry. If no entry is
found and INSERT is TRUE, add a new entry. */
static struct aggregate_vardecl_d *
get_aggregate_vardecl (tree decl, struct dse_global_data *dse_gd, bool insert)
{
struct aggregate_vardecl_d av, *av_p;
void **slot;
av.decl = decl;
slot = htab_find_slot (dse_gd->aggregate_vardecl, &av, insert ? INSERT : NO_INSERT);
/* Not found, and we don't want to insert. */
if (slot == NULL)
return NULL;
/* Create new entry. */
if (*slot == NULL)
{
av_p = XNEW (struct aggregate_vardecl_d);
av_p->decl = decl;
/* Record how many parts the whole has. */
if (TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
av_p->nparts = 2;
else if (TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
{
tree fields;
/* Count the number of fields. */
fields = TYPE_FIELDS (TREE_TYPE (decl));
av_p->nparts = 0;
while (fields)
{
av_p->nparts++;
fields = TREE_CHAIN (fields);
}
}
else
abort ();
av_p->ignore = true;
av_p->parts_set = sbitmap_alloc (HOST_BITS_PER_LONG);
sbitmap_zero (av_p->parts_set);
*slot = av_p;
}
else
av_p = (struct aggregate_vardecl_d *) *slot;
return av_p;
}
/* If STMT is a partial store into an aggregate, record which part got set. */
static void
dse_record_partial_aggregate_store (tree stmt, struct dse_global_data *dse_gd)
{
tree lhs, decl;
enum tree_code code;
struct aggregate_vardecl_d *av_p;
int part;
gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
lhs = GIMPLE_STMT_OPERAND (stmt, 0);
code = TREE_CODE (lhs);
if (code != IMAGPART_EXPR
&& code != REALPART_EXPR
&& code != COMPONENT_REF)
return;
decl = TREE_OPERAND (lhs, 0);
/* Early bail on things like nested COMPONENT_REFs. */
if (TREE_CODE (decl) != VAR_DECL)
return;
/* Early bail on unions. */
if (code == COMPONENT_REF
&& TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != RECORD_TYPE)
return;
av_p = get_aggregate_vardecl (decl, dse_gd, /*insert=*/false);
/* Run away, this isn't an aggregate we care about. */
if (!av_p || av_p->ignore)
return;
switch (code)
{
case IMAGPART_EXPR:
part = 0;
break;
case REALPART_EXPR:
part = 1;
break;
case COMPONENT_REF:
{
tree orig_field, fields;
tree record_type = TREE_TYPE (TREE_OPERAND (lhs, 0));
/* Get FIELD_DECL. */
orig_field = TREE_OPERAND (lhs, 1);
/* FIXME: Eeech, do this more efficiently. Perhaps
calculate bit/byte offsets. */
part = -1;
fields = TYPE_FIELDS (record_type);
while (fields)
{
++part;
if (fields == orig_field)
break;
fields = TREE_CHAIN (fields);
}
gcc_assert (part >= 0);
}
break;
default:
return;
}
/* Record which part was set. */
SET_BIT (av_p->parts_set, part);
}
/* Return TRUE if all parts in an AGGREGATE_VARDECL have been set. */
static inline bool
dse_whole_aggregate_clobbered_p (struct aggregate_vardecl_d *av_p)
{
unsigned int i;
sbitmap_iterator sbi;
int nbits_set = 0;
/* Count the number of partial stores (bits set). */
EXECUTE_IF_SET_IN_SBITMAP (av_p->parts_set, 0, i, sbi)
nbits_set++;
return ((unsigned) nbits_set == av_p->nparts);
}
/* Return TRUE if STMT is a store into a whole aggregate whose parts we
have already seen and recorded. */
static bool
dse_partial_kill_p (tree stmt, struct dse_global_data *dse_gd)
{
tree decl;
struct aggregate_vardecl_d *av_p;
/* Make sure this is a store into the whole. */
if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
{
enum tree_code code;
decl = GIMPLE_STMT_OPERAND (stmt, 0);
code = TREE_CODE (TREE_TYPE (decl));
if (code != COMPLEX_TYPE && code != RECORD_TYPE)
return false;
if (TREE_CODE (decl) != VAR_DECL)
return false;
}
else
return false;
av_p = get_aggregate_vardecl (decl, dse_gd, /*insert=*/false);
gcc_assert (av_p != NULL);
return dse_whole_aggregate_clobbered_p (av_p);
}
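A sketch of the case this enables, assuming a two-field struct variable s (names are hypothetical). The later partial stores cover every part of s, so the earlier whole-aggregate store is dead:

  s = t;                <-- dead: every part of s is overwritten below
  s.x = 1;              <-- part 0 recorded in parts_set
  s.y = 2;              <-- part 1 recorded in parts_set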
/* Attempt to eliminate dead stores in the statement referenced by BSI.
A dead store is a store into a memory location which will later be
@ -234,7 +543,7 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
/* If this statement has no virtual defs, then there is nothing
to do. */
if (ZERO_SSA_OPERANDS (stmt, (SSA_OP_VMAYDEF|SSA_OP_VMUSTDEF)))
if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
return;
/* We know we have virtual definitions. If this is a GIMPLE_MODIFY_STMT
@ -249,78 +558,14 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
{
use_operand_p first_use_p = NULL_USE_OPERAND_P;
use_operand_p use_p = NULL;
tree use_stmt, temp;
tree defvar = NULL_TREE, usevar = NULL_TREE;
bool fail = false;
use_operand_p var2;
def_operand_p var1;
ssa_op_iter op_iter;
tree use_stmt;
/* We want to verify that each virtual definition in STMT has
precisely one use and that all the virtual definitions are
used by the same single statement. When complete, we
want USE_STMT to refer to the one statement which uses
all of the virtual definitions from STMT. */
use_stmt = NULL;
FOR_EACH_SSA_MUST_AND_MAY_DEF_OPERAND (var1, var2, stmt, op_iter)
{
defvar = DEF_FROM_PTR (var1);
usevar = USE_FROM_PTR (var2);
if (!dse_possible_dead_store_p (stmt, &first_use_p, &use_p, &use_stmt,
dse_gd, bd))
return;
/* If this virtual def does not have precisely one use, then
we will not be able to eliminate STMT. */
if (! has_single_use (defvar))
{
fail = true;
break;
}
/* Get the one and only immediate use of DEFVAR. */
single_imm_use (defvar, &use_p, &temp);
gcc_assert (use_p != NULL_USE_OPERAND_P);
first_use_p = use_p;
/* If the immediate use of DEF_VAR is not the same as the
previously found immediate uses, then we will not be able
to eliminate STMT. */
if (use_stmt == NULL)
use_stmt = temp;
else if (temp != use_stmt)
{
fail = true;
break;
}
}
if (fail)
{
record_voperand_set (dse_gd->stores, &bd->stores, ann->uid);
return;
}
/* Skip through any PHI nodes we have already seen if the PHI
represents the only use of this store.
Note this does not handle the case where the store has
multiple V_{MAY,MUST}_DEFs which all reach a set of PHI nodes in the
same block. */
while (use_p != NULL_USE_OPERAND_P
&& TREE_CODE (use_stmt) == PHI_NODE
&& bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt)))
{
/* A PHI node can both define and use the same SSA_NAME if
the PHI is at the top of a loop and the PHI_RESULT is
a loop invariant and copies have not been fully propagated.
The safe thing to do is exit assuming no optimization is
possible. */
if (SSA_NAME_DEF_STMT (PHI_RESULT (use_stmt)) == use_stmt)
return;
/* Skip past this PHI and loop again in case we had a PHI
chain. */
single_imm_use (PHI_RESULT (use_stmt), &use_p, &use_stmt);
}
/* If this is a partial store into an aggregate, record it. */
dse_record_partial_aggregate_store (stmt, dse_gd);
/* If we have precisely one immediate use at this point, then we may
have found a redundant store. Make sure that the stores are to
@ -328,13 +573,15 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
SSA-form variables in the address will have the same values. */
if (use_p != NULL_USE_OPERAND_P
&& bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
&& operand_equal_p (GIMPLE_STMT_OPERAND (stmt, 0),
GIMPLE_STMT_OPERAND (use_stmt, 0), 0)
&& (operand_equal_p (GIMPLE_STMT_OPERAND (stmt, 0),
GIMPLE_STMT_OPERAND (use_stmt, 0), 0)
|| dse_partial_kill_p (stmt, dse_gd))
&& memory_address_same (stmt, use_stmt))
{
/* Make sure we propagate the ABNORMAL bit setting. */
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (first_use_p)))
SSA_NAME_OCCURS_IN_ABNORMAL_PHI (usevar) = 1;
ssa_op_iter op_iter;
def_operand_p var1;
vuse_vec_p vv;
tree stmt_lhs;
if (dump_file && (dump_flags & TDF_DETAILS))
{
@ -342,12 +589,23 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
print_generic_expr (dump_file, bsi_stmt (bsi), dump_flags);
fprintf (dump_file, "'\n");
}
/* Then we need to fix the operand of the consuming stmt. */
FOR_EACH_SSA_MUST_AND_MAY_DEF_OPERAND (var1, var2, stmt, op_iter)
stmt_lhs = USE_FROM_PTR (first_use_p);
FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
{
tree usevar, temp;
single_imm_use (DEF_FROM_PTR (var1), &use_p, &temp);
SET_USE (use_p, USE_FROM_PTR (var2));
gcc_assert (VUSE_VECT_NUM_ELEM (*vv) == 1);
usevar = VUSE_ELEMENT_VAR (*vv, 0);
SET_USE (use_p, usevar);
/* Make sure we propagate the ABNORMAL bit setting. */
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (stmt_lhs))
SSA_NAME_OCCURS_IN_ABNORMAL_PHI (usevar) = 1;
}
/* Remove the dead store. */
bsi_remove (&bsi, true);
@ -396,6 +654,54 @@ dse_finalize_block (struct dom_walk_data *walk_data,
}
}
/* Hashing and equality functions for AGGREGATE_VARDECL. */
static hashval_t
aggregate_vardecl_hash (const void *p)
{
return htab_hash_pointer
((const void *)((const struct aggregate_vardecl_d *)p)->decl);
}
static int
aggregate_vardecl_eq (const void *p1, const void *p2)
{
return ((const struct aggregate_vardecl_d *)p1)->decl
== ((const struct aggregate_vardecl_d *)p2)->decl;
}
/* Free memory allocated by one entry in AGGREGATE_VARDECL. */
static void
aggregate_vardecl_free (void *p)
{
struct aggregate_vardecl_d *entry = (struct aggregate_vardecl_d *) p;
sbitmap_free (entry->parts_set);
free (entry);
}
/* Return true if STMT is a store into an entire aggregate. */
static bool
aggregate_whole_store_p (tree stmt)
{
if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
{
tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
enum tree_code code = TREE_CODE (TREE_TYPE (lhs));
if (code == COMPLEX_TYPE || code == RECORD_TYPE)
return true;
}
return false;
}
/* Main entry point. */
static unsigned int
tree_ssa_dse (void)
{
@ -403,15 +709,40 @@ tree_ssa_dse (void)
struct dse_global_data dse_gd;
basic_block bb;
/* Create a UID for each statement in the function. Ordering of the
UIDs is not important for this pass. */
dse_gd.aggregate_vardecl =
htab_create (37, aggregate_vardecl_hash,
aggregate_vardecl_eq, aggregate_vardecl_free);
max_stmt_uid = 0;
FOR_EACH_BB (bb)
{
block_stmt_iterator bsi;
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
stmt_ann (bsi_stmt (bsi))->uid = max_stmt_uid++;
{
tree stmt = bsi_stmt (bsi);
/* Record aggregates which have been stored into as a whole. */
if (aggregate_whole_store_p (stmt))
{
tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
if (TREE_CODE (lhs) == VAR_DECL)
{
struct aggregate_vardecl_d *av_p;
av_p = get_aggregate_vardecl (lhs, &dse_gd, /*insert=*/true);
av_p->ignore = false;
/* Ignore aggregates with too many parts. */
if (av_p->nparts > HOST_BITS_PER_LONG)
av_p->ignore = true;
}
}
/* Create a UID for each statement in the function.
Ordering of the UIDs is not important for this pass. */
stmt_ann (stmt)->uid = max_stmt_uid++;
}
}
/* We might consider making this a property of each pass so that it
@ -437,6 +768,7 @@ tree_ssa_dse (void)
/* This is the main hash table for the dead store elimination pass. */
dse_gd.stores = BITMAP_ALLOC (NULL);
walk_data.global_data = &dse_gd;
/* Initialize the dominator walker. */
@ -448,8 +780,9 @@ tree_ssa_dse (void)
/* Finalize the dominator walker. */
fini_walk_dominator_tree (&walk_data);
/* Release the main bitmap. */
/* Release unneeded data. */
BITMAP_FREE (dse_gd.stores);
htab_delete (dse_gd.aggregate_vardecl);
/* For now, just wipe the post-dominator information. */
free_dominance_info (CDI_POST_DOMINATORS);


@ -496,7 +496,7 @@ determine_max_movement (tree stmt, bool must_preserve_exec)
if (!add_dependency (val, lim_data, loop, true))
return false;
FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
if (!add_dependency (val, lim_data, loop, false))
return false;
@ -1251,15 +1251,13 @@ gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
}
ref->is_stored |= is_stored;
FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
record_mem_ref_loc (&ref->locs, stmt, mem);
return;
fail:
FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi,
SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
bitmap_set_bit (clobbered_vops, DECL_UID (SSA_NAME_VAR (vname)));
}


@ -262,7 +262,7 @@ find_uses_to_rename_stmt (tree stmt, bitmap *use_blocks, bitmap need_phis)
tree var;
basic_block bb = bb_for_stmt (stmt);
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES | SSA_OP_ALL_KILLS)
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
find_uses_to_rename_use (bb, var, use_blocks, need_phis);
}
@ -406,7 +406,7 @@ check_loop_closed_ssa_stmt (basic_block bb, tree stmt)
ssa_op_iter iter;
tree var;
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES | SSA_OP_ALL_KILLS)
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
check_loop_closed_ssa_use (bb, var);
}
@ -454,13 +454,13 @@ split_loop_exit_edge (edge exit)
name = USE_FROM_PTR (op_p);
/* If the argument of the phi node is a constant, we do not need
/* If the argument of the PHI node is a constant, we do not need
to keep it inside loop. */
if (TREE_CODE (name) != SSA_NAME)
continue;
/* Otherwise create an auxiliary phi node that will copy the value
of the ssa name out of the loop. */
of the SSA name out of the loop. */
new_name = duplicate_ssa_name (name, NULL);
new_phi = create_phi_node (new_name, bb);
SSA_NAME_DEF_STMT (new_name) = new_phi;

File diff suppressed because it is too large.


@ -50,35 +50,71 @@ struct use_optype_d
};
typedef struct use_optype_d *use_optype_p;
/* This represents the MAY_DEFS for a stmt. */
struct maydef_optype_d
typedef struct vuse_element_d
{
struct maydef_optype_d *next;
tree def_var;
tree use_var;
struct ssa_use_operand_d use_ptr;
} vuse_element_t;
typedef struct vuse_vec_d
{
int num_vuse;
vuse_element_t uses[1];
} vuse_vec_t;
typedef struct vuse_vec_d *vuse_vec_p;
#define VUSE_VECT_NUM_ELEM(V) (V).num_vuse
#define VUSE_VECT_ELEMENT_NC(V,X) (V).uses[(X)]
#define VUSE_ELEMENT_PTR_NC(V,X) (&(VUSE_VECT_ELEMENT_NC ((V),(X)).use_ptr))
#define VUSE_ELEMENT_VAR_NC(V,X) (VUSE_VECT_ELEMENT_NC ((V),(X)).use_var)
#ifdef ENABLE_CHECKING
#define VUSE_VECT_ELEMENT(V,X) \
(gcc_assert ((X) >= 0 && (X) < VUSE_VECT_NUM_ELEM (V)), \
VUSE_VECT_ELEMENT_NC (V,X))
#define VUSE_ELEMENT_PTR(V,X) \
(gcc_assert ((X) >= 0 && (X) < VUSE_VECT_NUM_ELEM (V)), \
VUSE_ELEMENT_PTR_NC (V, X))
#define SET_VUSE_VECT_ELEMENT(V,X,N) \
(gcc_assert ((X) >= 0 && (X) < VUSE_VECT_NUM_ELEM (V)), \
VUSE_VECT_ELEMENT_NC (V,X) = (N))
#define SET_VUSE_ELEMENT_VAR(V,X,N) \
(gcc_assert ((X) >= 0 && (X) < VUSE_VECT_NUM_ELEM (V)), \
VUSE_VECT_ELEMENT_NC ((V),(X)).use_var = (N))
#define SET_VUSE_ELEMENT_PTR(V,X,N) \
(gcc_assert ((X) >= 0 && (X) < VUSE_VECT_NUM_ELEM (V)), \
VUSE_ELEMENT_PTR_NC (V, X) = (N))
#else
#define VUSE_VECT_ELEMENT(V,X) VUSE_VECT_ELEMENT_NC(V,X)
#define VUSE_ELEMENT_PTR(V,X) VUSE_ELEMENT_PTR_NC(V,X)
#define SET_VUSE_VECT_ELEMENT(V,X,N) VUSE_VECT_ELEMENT_NC(V,X) = (N)
#define SET_VUSE_ELEMENT_PTR(V,X,N) VUSE_ELEMENT_PTR_NC(V,X) = (N)
#define SET_VUSE_ELEMENT_VAR(V,X,N) VUSE_VECT_ELEMENT_NC ((V),(X)).use_var = (N)
#endif
#define VUSE_ELEMENT_VAR(V,X) (VUSE_VECT_ELEMENT ((V),(X)).use_var)
/* This represents the VDEFS for a stmt. */
struct vdef_optype_d
{
struct vdef_optype_d *next;
tree def_var;
vuse_vec_t usev;
};
typedef struct maydef_optype_d *maydef_optype_p;
typedef struct vdef_optype_d *vdef_optype_p;
/* This represents the VUSEs for a stmt. */
struct vuse_optype_d
{
struct vuse_optype_d *next;
tree use_var;
struct ssa_use_operand_d use_ptr;
vuse_vec_t usev;
};
typedef struct vuse_optype_d *vuse_optype_p;
/* This represents the V_MUST_DEFS for a stmt. */
struct mustdef_optype_d
{
struct mustdef_optype_d *next;
tree def_var;
tree kill_var;
struct ssa_use_operand_d use_ptr;
};
typedef struct mustdef_optype_d *mustdef_optype_p;
#define SSA_OPERAND_MEMORY_SIZE (2048 - sizeof (void *))
@ -98,8 +134,8 @@ struct ssa_operands GTY(()) {
struct def_optype_d * GTY ((skip (""))) free_defs;
struct use_optype_d * GTY ((skip (""))) free_uses;
struct vuse_optype_d * GTY ((skip (""))) free_vuses;
struct maydef_optype_d * GTY ((skip (""))) free_maydefs;
struct mustdef_optype_d * GTY ((skip (""))) free_mustdefs;
struct vdef_optype_d * GTY ((skip (""))) free_vdefs;
VEC(tree,heap) * GTY ((skip (""))) mpt_table;
};
/* This represents the operand cache for a stmt. */
@ -109,10 +145,13 @@ struct stmt_operands_d
struct def_optype_d * def_ops;
struct use_optype_d * use_ops;
/* Virtual operands (V_MAY_DEF, VUSE, and V_MUST_DEF). */
struct maydef_optype_d * maydef_ops;
/* Virtual operands (VDEF, VUSE). */
struct vdef_optype_d * vdef_ops;
struct vuse_optype_d * vuse_ops;
struct mustdef_optype_d * mustdef_ops;
/* Sets of memory symbols loaded and stored. */
bitmap stores;
bitmap loads;
};
typedef struct stmt_operands_d *stmt_operands_p;
@ -127,8 +166,10 @@ typedef struct stmt_operands_d *stmt_operands_p;
#define DEF_OPS(STMT) (stmt_ann (STMT)->operands.def_ops)
#define USE_OPS(STMT) (stmt_ann (STMT)->operands.use_ops)
#define VUSE_OPS(STMT) (stmt_ann (STMT)->operands.vuse_ops)
#define MAYDEF_OPS(STMT) (stmt_ann (STMT)->operands.maydef_ops)
#define MUSTDEF_OPS(STMT) (stmt_ann (STMT)->operands.mustdef_ops)
#define VDEF_OPS(STMT) (stmt_ann (STMT)->operands.vdef_ops)
#define LOADED_SYMS(STMT) (stmt_ann (STMT)->operands.loads)
#define STORED_SYMS(STMT) (stmt_ann (STMT)->operands.stores)
#define USE_OP_PTR(OP) (&((OP)->use_ptr))
#define USE_OP(OP) (USE_FROM_PTR (USE_OP_PTR (OP)))
@ -136,18 +177,19 @@ typedef struct stmt_operands_d *stmt_operands_p;
#define DEF_OP_PTR(OP) ((OP)->def_ptr)
#define DEF_OP(OP) (DEF_FROM_PTR (DEF_OP_PTR (OP)))
#define VUSE_OP_PTR(OP) USE_OP_PTR(OP)
#define VUSE_OP(OP) ((OP)->use_var)
#define VUSE_OP_PTR(OP,X) VUSE_ELEMENT_PTR ((OP)->usev, (X))
#define VUSE_OP(OP,X) VUSE_ELEMENT_VAR ((OP)->usev, (X))
#define SET_VUSE_OP(OP,X,N) SET_VUSE_ELEMENT_VAR ((OP)->usev, (X), (N))
#define VUSE_NUM(OP) VUSE_VECT_NUM_ELEM ((OP)->usev)
#define VUSE_VECT(OP) &((OP)->usev)
#define MAYDEF_RESULT_PTR(OP) (&((OP)->def_var))
#define MAYDEF_RESULT(OP) ((OP)->def_var)
#define MAYDEF_OP_PTR(OP) USE_OP_PTR (OP)
#define MAYDEF_OP(OP) ((OP)->use_var)
#define MUSTDEF_RESULT_PTR(OP) (&((OP)->def_var))
#define MUSTDEF_RESULT(OP) ((OP)->def_var)
#define MUSTDEF_KILL_PTR(OP) USE_OP_PTR (OP)
#define MUSTDEF_KILL(OP) ((OP)->kill_var)
#define VDEF_RESULT_PTR(OP) (&((OP)->def_var))
#define VDEF_RESULT(OP) ((OP)->def_var)
#define VDEF_OP_PTR(OP,X) VUSE_OP_PTR (OP, X)
#define VDEF_OP(OP,X) VUSE_OP (OP, X)
#define SET_VDEF_OP(OP,X,N) SET_VUSE_OP (OP, X, N)
#define VDEF_NUM(OP) VUSE_VECT_NUM_ELEM ((OP)->usev)
#define VDEF_VECT(OP) &((OP)->usev)
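A minimal usage sketch of the new multi-operand accessors (assuming stmt is a statement with virtual operands): walk each VDEF operator and print every SSA name it uses on its right-hand side, much as dump_vops does above:

  struct vdef_optype_d *vdefs;
  int i;

  for (vdefs = VDEF_OPS (stmt); vdefs; vdefs = vdefs->next)
    for (i = 0; i < VDEF_NUM (vdefs); i++)
      print_generic_expr (stderr, VDEF_OP (vdefs, i), 0);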
#define PHI_RESULT_PTR(PHI) get_phi_result_ptr (PHI)
#define PHI_RESULT(PHI) DEF_FROM_PTR (PHI_RESULT_PTR (PHI))
@ -164,6 +206,9 @@ typedef struct stmt_operands_d *stmt_operands_p;
#define PHI_ARG_INDEX_FROM_USE(USE) phi_arg_index_from_use (USE)
extern struct vdef_optype_d *realloc_vdef (struct vdef_optype_d *, int);
extern struct vuse_optype_d *realloc_vuse (struct vuse_optype_d *, int);
extern void init_ssa_operands (void);
extern void fini_ssa_operands (void);
extern void free_ssa_operands (stmt_operands_p);
@ -177,6 +222,8 @@ extern void dump_immediate_uses (FILE *file);
extern void dump_immediate_uses_for (FILE *file, tree var);
extern void debug_immediate_uses (void);
extern void debug_immediate_uses_for (tree var);
extern void dump_decl_set (FILE *, bitmap);
extern void debug_decl_set (bitmap);
extern bool ssa_operands_active (void);
@ -190,8 +237,9 @@ enum ssa_op_iter_type {
ssa_op_iter_tree,
ssa_op_iter_use,
ssa_op_iter_def,
ssa_op_iter_maymustdef
ssa_op_iter_vdef
};
/* This structure is used in the operand iterator loops. It contains the
items required to determine which operand is retrieved next. During
optimization, this structure is scalarized, and any unused fields are
@ -202,15 +250,15 @@ typedef struct ssa_operand_iterator_d
def_optype_p defs;
use_optype_p uses;
vuse_optype_p vuses;
maydef_optype_p maydefs;
maydef_optype_p mayuses;
mustdef_optype_p mustdefs;
mustdef_optype_p mustkills;
vdef_optype_p vdefs;
vdef_optype_p mayuses;
enum ssa_op_iter_type iter_type;
int phi_i;
int num_phi;
tree phi_stmt;
bool done;
int vuse_index;
int mayuse_index;
} ssa_op_iter;
/* These flags are used to determine which operands are returned during
@ -218,22 +266,16 @@ typedef struct ssa_operand_iterator_d
#define SSA_OP_USE 0x01 /* Real USE operands. */
#define SSA_OP_DEF 0x02 /* Real DEF operands. */
#define SSA_OP_VUSE 0x04 /* VUSE operands. */
#define SSA_OP_VMAYUSE 0x08 /* USE portion of V_MAY_DEFS. */
#define SSA_OP_VMAYDEF 0x10 /* DEF portion of V_MAY_DEFS. */
#define SSA_OP_VMUSTDEF 0x20 /* V_MUST_DEF definitions. */
#define SSA_OP_VMUSTKILL 0x40 /* V_MUST_DEF kills. */
#define SSA_OP_VMAYUSE 0x08 /* USE portion of VDEFS. */
#define SSA_OP_VDEF 0x10 /* DEF portion of VDEFS. */
/* These are commonly grouped operand flags. */
#define SSA_OP_VIRTUAL_USES (SSA_OP_VUSE | SSA_OP_VMAYUSE)
#define SSA_OP_VIRTUAL_DEFS (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF)
#define SSA_OP_VIRTUAL_KILLS (SSA_OP_VMUSTKILL)
#define SSA_OP_ALL_VIRTUALS (SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS \
| SSA_OP_VIRTUAL_DEFS)
#define SSA_OP_VIRTUAL_DEFS (SSA_OP_VDEF)
#define SSA_OP_ALL_VIRTUALS (SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_DEFS)
#define SSA_OP_ALL_USES (SSA_OP_VIRTUAL_USES | SSA_OP_USE)
#define SSA_OP_ALL_DEFS (SSA_OP_VIRTUAL_DEFS | SSA_OP_DEF)
#define SSA_OP_ALL_KILLS (SSA_OP_VIRTUAL_KILLS)
#define SSA_OP_ALL_OPERANDS (SSA_OP_ALL_USES | SSA_OP_ALL_DEFS \
| SSA_OP_ALL_KILLS)
#define SSA_OP_ALL_OPERANDS (SSA_OP_ALL_USES | SSA_OP_ALL_DEFS)
/* This macro executes a loop over the operands of STMT specified in FLAG,
returning each operand as a 'tree' in the variable TREEVAR. ITER is an
@ -259,29 +301,13 @@ typedef struct ssa_operand_iterator_d
!op_iter_done (&(ITER)); \
DEFVAR = op_iter_next_def (&(ITER)))
/* This macro executes a loop over the V_MAY_DEF operands of STMT. The def
and use for each V_MAY_DEF is returned in DEFVAR and USEVAR.
/* This macro executes a loop over the VDEF operands of STMT. The def
and use for each VDEF is returned in DEFVAR and USEVAR.
ITER is an ssa_op_iter structure used to control the loop. */
#define FOR_EACH_SSA_MAYDEF_OPERAND(DEFVAR, USEVAR, STMT, ITER) \
for (op_iter_init_maydef (&(ITER), STMT, &(USEVAR), &(DEFVAR)); \
#define FOR_EACH_SSA_VDEF_OPERAND(DEFVAR, USEVAR, STMT, ITER) \
for (op_iter_init_vdef (&(ITER), STMT, &(USEVAR), &(DEFVAR)); \
!op_iter_done (&(ITER)); \
op_iter_next_maymustdef (&(USEVAR), &(DEFVAR), &(ITER)))
/* This macro executes a loop over the V_MUST_DEF operands of STMT. The def
and kill for each V_MUST_DEF is returned in DEFVAR and KILLVAR.
ITER is an ssa_op_iter structure used to control the loop. */
#define FOR_EACH_SSA_MUSTDEF_OPERAND(DEFVAR, KILLVAR, STMT, ITER) \
for (op_iter_init_mustdef (&(ITER), STMT, &(KILLVAR), &(DEFVAR)); \
!op_iter_done (&(ITER)); \
op_iter_next_maymustdef (&(KILLVAR), &(DEFVAR), &(ITER)))
/* This macro executes a loop over the V_{MUST,MAY}_DEF of STMT. The def
and kill for each V_{MUST,MAY}_DEF is returned in DEFVAR and KILLVAR.
ITER is an ssa_op_iter structure used to control the loop. */
#define FOR_EACH_SSA_MUST_AND_MAY_DEF_OPERAND(DEFVAR, KILLVAR, STMT, ITER)\
for (op_iter_init_must_and_may_def (&(ITER), STMT, &(KILLVAR), &(DEFVAR));\
!op_iter_done (&(ITER)); \
op_iter_next_maymustdef (&(KILLVAR), &(DEFVAR), &(ITER)))
op_iter_next_vdef (&(USEVAR), &(DEFVAR), &(ITER)))
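A usage sketch of this iterator (stmt assumed to be a non-PHI statement with virtual operands); it mirrors the DSE code above and marks the symbol underlying each VDEF result for renaming:

  def_operand_p def_p;
  vuse_vec_p vv;
  ssa_op_iter iter;

  FOR_EACH_SSA_VDEF_OPERAND (def_p, vv, stmt, iter)
    mark_sym_for_renaming (SSA_NAME_VAR (DEF_FROM_PTR (def_p)));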
/* This macro will execute a loop over all the arguments of a PHI which
match FLAGS. A use_operand_p is always returned via USEVAR. FLAGS
@ -334,4 +360,8 @@ typedef struct ssa_operand_iterator_d
/* This macro counts the number of operands in STMT matching FLAGS. */
#define NUM_SSA_OPERANDS(STMT, FLAGS) num_ssa_operands (STMT, FLAGS)
extern tree get_mpt_for (tree);
extern void dump_memory_partitions (FILE *);
extern void debug_memory_partitions (void);
#endif /* GCC_TREE_SSA_OPERANDS_H */


@ -2177,16 +2177,13 @@ compute_rvuse_and_antic_safe (void)
{
tree stmt = bsi_stmt (bsi);
if (first_store_uid[bb->index] == 0
&& !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYUSE | SSA_OP_VMAYDEF
| SSA_OP_VMUSTDEF | SSA_OP_VMUSTKILL))
if (first_store_uid[bb->index] == 0
&& !ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYUSE | SSA_OP_VDEF))
{
first_store_uid[bb->index] = stmt_ann (stmt)->uid;
}
FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VIRTUAL_KILLS
| SSA_OP_VMAYUSE)
FOR_EACH_SSA_USE_OPERAND (usep, stmt, iter, SSA_OP_VMAYUSE)
{
tree use = USE_FROM_PTR (usep);
bitmap repbit = get_representative (vuse_names,
@ -4004,14 +4001,14 @@ remove_dead_inserted_code (void)
else
{
/* Propagate through the operands. Examine all the USE, VUSE and
V_MAY_DEF operands in this statement. Mark all the statements
VDEF operands in this statement. Mark all the statements
which feed this statement's uses as necessary. */
ssa_op_iter iter;
tree use;
/* The operands of V_MAY_DEF expressions are also needed as they
/* The operands of VDEF expressions are also needed as they
represent potential definitions that may reach this
statement (V_MAY_DEF operands allow us to follow def-def
statement (VDEF operands allow us to follow def-def
links). */
FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)


@ -749,7 +749,7 @@ ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
}
/* Return the first V_MAY_DEF or V_MUST_DEF operand for STMT. */
/* Return the first VDEF operand for STMT. */
tree
first_vdef (tree stmt)
@ -778,7 +778,7 @@ stmt_makes_single_load (tree stmt)
if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
return false;
if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF|SSA_OP_VUSE))
if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF|SSA_OP_VUSE))
return false;
rhs = GIMPLE_STMT_OPERAND (stmt, 1);
@ -803,7 +803,7 @@ stmt_makes_single_store (tree stmt)
if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
return false;
if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF|SSA_OP_VMUSTDEF))
if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
return false;
lhs = GIMPLE_STMT_OPERAND (stmt, 0);
@ -906,7 +906,7 @@ replace_uses_in (tree stmt, bool *replaced_addresses_p,
GIMPLE register, then we are making a copy/constant propagation
from a memory store. For instance,
# a_3 = V_MAY_DEF <a_2>
# a_3 = VDEF <a_2>
a.b = x_1;
...
# VUSE <a_3>
@ -917,8 +917,8 @@ replace_uses_in (tree stmt, bool *replaced_addresses_p,
the VUSE(s) that we are replacing. Otherwise, we may do the
wrong replacement:
# a_3 = V_MAY_DEF <a_2>
# b_5 = V_MAY_DEF <b_4>
# a_3 = VDEF <a_2>
# b_5 = VDEF <b_4>
*p = 10;
...
# VUSE <b_5>
@ -938,10 +938,10 @@ replace_uses_in (tree stmt, bool *replaced_addresses_p,
stored in different locations:
if (...)
# a_3 = V_MAY_DEF <a_2>
# a_3 = VDEF <a_2>
a.b = 3;
else
# a_4 = V_MAY_DEF <a_2>
# a_4 = VDEF <a_2>
a.c = 3;
# a_5 = PHI <a_3, a_4>


@ -131,7 +131,7 @@ all_immediate_uses_same_place (tree stmt)
return true;
}
/* Some global stores don't necessarily have V_MAY_DEF's of global variables,
/* Some global stores don't necessarily have VDEF's of global variables,
but we still must avoid moving them around. */
bool
@ -156,7 +156,7 @@ is_hidden_global_store (tree stmt)
int x;
p_1 = (i_2 > 3) ? &x : p;
# x_4 = V_MAY_DEF <x_3>
# x_4 = VDEF <x_3>
*p_1 = 5;
return 2;
@ -194,7 +194,7 @@ is_hidden_global_store (tree stmt)
tree ptr = TREE_OPERAND (lhs, 0);
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
tree nmt = (pi) ? pi->name_mem_tag : NULL_TREE;
tree smt = var_ann (SSA_NAME_VAR (ptr))->symbol_mem_tag;
tree smt = symbol_mem_tag (SSA_NAME_VAR (ptr));
/* If either the name tag or the symbol tag for PTR is a
global variable, then the store is necessary. */
@ -207,6 +207,7 @@ is_hidden_global_store (tree stmt)
else
gcc_unreachable ();
}
return false;
}
@ -402,7 +403,7 @@ statement_sink_location (tree stmt, basic_block frombb)
/* This will happen when you have
a_3 = PHI <a_13, a_26>
a_26 = V_MAY_DEF <a_3>
a_26 = VDEF <a_3>
If the use is a phi, and is in the same bb as the def,
we can't sink it. */


@ -52,6 +52,7 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#include "tree-ssa-structalias.h"
#include "cgraph.h"
#include "alias.h"
#include "pointer-set.h"
/* The idea behind this analyzer is to generate set constraints from the
program, then solve the resulting constraints in order to generate the
@ -285,8 +286,8 @@ DEF_VEC_P(varinfo_t);
DEF_VEC_ALLOC_P(varinfo_t, heap);
/* Table of variable info structures for constraint variables. Indexed directly
by variable info id. */
/* Table of variable info structures for constraint variables.
Indexed directly by variable info id. */
static VEC(varinfo_t,heap) *varmap;
/* Return the varmap element N */
@ -1840,7 +1841,7 @@ get_constraint_exp_from_ssa_var (tree t)
decl. */
if (TREE_CODE (t) == SSA_NAME
&& TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
&& gimple_default_def (cfun, SSA_NAME_VAR (t)) == t)
&& SSA_NAME_IS_DEFAULT_DEF (t))
return get_constraint_exp_from_ssa_var (SSA_NAME_VAR (t));
cexpr.type = SCALAR;
@ -2641,7 +2642,6 @@ update_alias_info (tree stmt, struct alias_info *ai)
use_operand_p use_p;
ssa_op_iter iter;
enum escape_type stmt_escape_type = is_escape_site (stmt);
tree op;
if (stmt_escape_type == ESCAPE_TO_CALL
|| stmt_escape_type == ESCAPE_TO_PURE_CONST)
@ -2715,7 +2715,7 @@ update_alias_info (tree stmt, struct alias_info *ai)
var = SSA_NAME_VAR (op);
v_ann = var_ann (var);
/* The base variable of an ssa name must be a GIMPLE register, and thus
/* The base variable of an SSA name must be a GIMPLE register, and thus
it cannot be aliased. */
gcc_assert (!may_be_aliased (var));
@ -2751,7 +2751,7 @@ update_alias_info (tree stmt, struct alias_info *ai)
So, if the original code had no other dereferences of PTR,
the aliaser will not create memory tags for it, and when
&PTR->FLD gets propagated to INDIRECT_REF expressions, the
memory operations will receive no V_MAY_DEF/VUSE operands.
memory operations will receive no VDEF/VUSE operands.
One solution would be to have count_uses_and_derefs consider
&PTR->FLD a dereference of PTR. But that is wrong, since it
@ -2784,17 +2784,13 @@ update_alias_info (tree stmt, struct alias_info *ai)
all the variables OP points to. */
pi->is_dereferenced = 1;
/* Keep track of how many time we've dereferenced each
pointer. */
NUM_REFERENCES_INC (v_ann);
/* If this is a store operation, mark OP as being
dereferenced to store, otherwise mark it as being
dereferenced to load. */
if (is_store)
bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
pointer_set_insert (ai->dereferenced_ptrs_store, var);
else
bitmap_set_bit (ai->dereferenced_ptrs_load, DECL_UID (var));
pointer_set_insert (ai->dereferenced_ptrs_load, var);
}
if (stmt_escape_type != NO_ESCAPE && num_derefs < num_uses)
@ -2812,7 +2808,7 @@ update_alias_info (tree stmt, struct alias_info *ai)
if (get_call_expr_in (stmt)
|| stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
{
bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
pointer_set_insert (ai->dereferenced_ptrs_store, var);
pi->is_dereferenced = 1;
}
}
@ -2821,24 +2817,14 @@ update_alias_info (tree stmt, struct alias_info *ai)
if (TREE_CODE (stmt) == PHI_NODE)
return;
/* Update reference counter for definitions to any
potentially aliased variable. This is used in the alias
grouping heuristics. */
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
/* Mark stored variables in STMT as being written to and update the
reference counter for potentially aliased symbols in STMT. */
if (stmt_references_memory_p (stmt) && STORED_SYMS (stmt))
{
tree var = SSA_NAME_VAR (op);
var_ann_t ann = var_ann (var);
bitmap_set_bit (ai->written_vars, DECL_UID (var));
if (may_be_aliased (var))
NUM_REFERENCES_INC (ann);
}
/* Mark variables in V_MAY_DEF operands as being written to. */
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
{
tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
bitmap_set_bit (ai->written_vars, DECL_UID (var));
unsigned i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
pointer_set_insert (ai->written_vars, referenced_var (i));
}
}
@ -3992,7 +3978,7 @@ find_what_p_points_to (tree p)
decl. */
if (TREE_CODE (p) == SSA_NAME
&& TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
&& gimple_default_def (cfun, SSA_NAME_VAR (p)) == p)
&& SSA_NAME_IS_DEFAULT_DEF (p))
lookup_p = SSA_NAME_VAR (p);
if (lookup_id_for_tree (lookup_p, &id))
@ -4286,6 +4272,7 @@ compute_points_to_sets (struct alias_info *ai)
tree stmt = bsi_stmt (bsi);
find_func_aliases (stmt);
/* Update various related attributes like escaped
addresses, pointer dereferences for loads and stores.
This is used when creating name tags and alias


@ -55,31 +55,20 @@ struct alias_info
/* Number of const/pure function calls found in the program. */
size_t num_pure_const_calls_found;
/* Total number of virtual operands that will be needed to represent
all the aliases of all the pointers found in the program. */
long total_alias_vops;
/* Variables that have been written to. */
bitmap written_vars;
/* Variables that have been written to directly (i.e., not through a
pointer dereference). */
struct pointer_set_t *written_vars;
/* Pointers that have been used in an indirect store operation. */
bitmap dereferenced_ptrs_store;
struct pointer_set_t *dereferenced_ptrs_store;
/* Pointers that have been used in an indirect load operation. */
bitmap dereferenced_ptrs_load;
struct pointer_set_t *dereferenced_ptrs_load;
/* Memory tag for all the PTR_IS_REF_ALL pointers. */
tree ref_all_symbol_mem_tag;
};
/* Keep track of how many times each pointer has been dereferenced in
the program using the aux variable. This is used by the alias
grouping heuristic in compute_flow_insensitive_aliasing. */
#define NUM_REFERENCES(ANN) ((size_t)((ANN)->common.aux))
#define NUM_REFERENCES_CLEAR(ANN) ((ANN)->common.aux) = 0
#define NUM_REFERENCES_INC(ANN) (ANN)->common.aux = (void*) (((size_t)((ANN)->common.aux)) + 1)
#define NUM_REFERENCES_SET(ANN, VAL) (ANN)->common.aux = (void*) ((void *)(VAL))
/* In tree-ssa-alias.c. */
enum escape_type is_escape_site (tree);
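The written_vars and dereferenced_ptrs_* sets above change from bitmaps indexed by DECL_UID to pointer sets keyed on the _DECL nodes themselves. A minimal sketch of the pointer-set.h calls this relies on, assuming its usual create/insert/contains entry points (VAR is shown as void * purely for illustration; the real code passes trees):

#include "pointer-set.h"

/* Sketch only, not part of the patch.  */
static void
record_write (struct pointer_set_t *written_vars, void *var)
{
  /* Membership is keyed on the pointer itself, so there is no
     DECL_UID lookup and duplicate insertions are harmless.  */
  pointer_set_insert (written_vars, var);
}

static int
written_p (struct pointer_set_t *written_vars, void *var)
{
  return pointer_set_contains (written_vars, var);
}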

@ -167,7 +167,7 @@ typedef struct temp_expr_table_d
int *num_in_part; /* # of ssa_names in a partition. */
} *temp_expr_table_p;
/* Used to indicate a dependency on V_MAY_DEFs. */
/* Used to indicate a dependency on VDEFs. */
#define VIRTUAL_PARTITION(table) (table->virtual_partition)
#ifdef ENABLE_CHECKING
@ -384,8 +384,8 @@ is_replaceable_p (tree stmt)
if (TREE_CODE (use_stmt) == PHI_NODE)
return false;
/* There must be no V_MAY_DEFS or V_MUST_DEFS. */
if (!(ZERO_SSA_OPERANDS (stmt, (SSA_OP_VMAYDEF | SSA_OP_VMUSTDEF))))
/* There must be no VDEFs. */
if (!(ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF)))
return false;
/* Float expressions must go through memory if float-store is on. */

@ -76,7 +76,7 @@ ssa_redirect_edge (edge e, basic_block dest)
return e;
}
/* Add PHI arguments queued in PENDINT_STMT list on edge E to edge
/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
E->dest. */
void
@ -143,6 +143,13 @@ verify_ssa_name (tree ssa_name, bool is_virtual)
return true;
}
if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
&& !IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name)))
{
error ("found a default name with a non-empty defining statement");
return true;
}
return false;
}
@ -156,8 +163,7 @@ verify_ssa_name (tree ssa_name, bool is_virtual)
it means that the block in that array slot contains the
definition of SSA_NAME.
IS_VIRTUAL is true if SSA_NAME is created by a V_MAY_DEF or a
V_MUST_DEF. */
IS_VIRTUAL is true if SSA_NAME is created by a VDEF. */
static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
@ -208,22 +214,16 @@ err:
is flowing through an abnormal edge (only used when checking PHI
arguments).
IS_VIRTUAL is true if SSA_NAME is created by a V_MAY_DEF or a
V_MUST_DEF.
If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
that are defined before STMT in basic block BB. */
static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
tree stmt, bool check_abnormal, bool is_virtual,
bitmap names_defined_in_bb)
tree stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
bool err = false;
tree ssa_name = USE_FROM_PTR (use_p);
err = verify_ssa_name (ssa_name, is_virtual);
if (!TREE_VISITED (ssa_name))
if (verify_imm_links (stderr, ssa_name))
err = true;
@ -231,7 +231,7 @@ verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
TREE_VISITED (ssa_name) = 1;
if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name))
&& gimple_default_def (cfun, SSA_NAME_VAR (ssa_name)) == ssa_name)
&& SSA_NAME_IS_DEFAULT_DEF (ssa_name))
; /* Default definitions have empty statements. Nothing to do. */
else if (!def_bb)
{
@ -296,9 +296,10 @@ verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
/* Return true if any of the arguments for PHI node PHI at block BB is
malformed.
DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME version
numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set, it means that the
block in that array slot contains the definition of SSA_NAME. */
DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
version numbers. If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
it means that the block in that array slot contains the
definition of SSA_NAME. */
static bool
verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
@ -319,7 +320,6 @@ verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
use_operand_p op_p = PHI_ARG_DEF_PTR (phi, i);
tree op = USE_FROM_PTR (op_p);
e = EDGE_PRED (bb, i);
if (op == NULL_TREE)
@ -338,10 +338,11 @@ verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
}
if (TREE_CODE (op) == SSA_NAME)
err = verify_use (e->src, definition_block[SSA_NAME_VERSION (op)], op_p,
phi, e->flags & EDGE_ABNORMAL,
!is_gimple_reg (PHI_RESULT (phi)),
NULL);
{
err = verify_ssa_name (op, !is_gimple_reg (PHI_RESULT (phi)));
err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
}
if (e->dest != bb)
{
@ -362,7 +363,7 @@ error:
if (err)
{
fprintf (stderr, "for PHI node\n");
print_generic_stmt (stderr, phi, TDF_VOPS);
print_generic_stmt (stderr, phi, TDF_VOPS|TDF_MEMSYMS);
}
@ -391,7 +392,8 @@ verify_flow_insensitive_alias_info (void)
{
bitmap_set_bit (visited, DECL_UID (alias));
if (!may_be_aliased (alias))
if (TREE_CODE (alias) != MEMORY_PARTITION_TAG
&& !may_be_aliased (alias))
{
error ("non-addressable variable inside an alias set");
debug_variable (alias);
@ -407,9 +409,11 @@ verify_flow_insensitive_alias_info (void)
if (!MTAG_P (var)
&& ann->is_aliased
&& memory_partition (var) == NULL_TREE
&& !bitmap_bit_p (visited, DECL_UID (var)))
{
error ("addressable variable that is aliased but is not in any alias set");
error ("addressable variable that is aliased but is not in any "
"alias set");
goto err;
}
}
@ -472,12 +476,17 @@ verify_flow_sensitive_alias_info (void)
goto err;
}
if (pi->value_escapes_p
&& pi->name_mem_tag
&& !is_call_clobbered (pi->name_mem_tag))
if (pi->value_escapes_p && pi->name_mem_tag)
{
error ("pointer escapes but its name tag is not call-clobbered");
goto err;
tree t = memory_partition (pi->name_mem_tag);
if (t == NULL_TREE)
t = pi->name_mem_tag;
if (!is_call_clobbered (t))
{
error ("pointer escapes but its name tag is not call-clobbered");
goto err;
}
}
}
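For reference, the "value escapes" condition checked just above corresponds to source situations like this one (hypothetical names):

int *global_p;
extern void foo (void);

void
g (int *p)
{
  global_p = p;   /* The value of P escapes: it is stored in a global.  */
  foo ();         /* The callee may load global_p and dereference it, so
                     whatever P points to must be considered call-clobbered,
                     either via P's name tag or via the memory partition
                     that tag was grouped into.  */
}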
@ -488,7 +497,9 @@ err:
internal_error ("verify_flow_sensitive_alias_info failed");
}
/* Verify the consistency of call clobbering information. */
static void
verify_call_clobbering (void)
{
@ -505,23 +516,38 @@ verify_call_clobbering (void)
EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
{
var = referenced_var (i);
if (memory_partition (var))
var = memory_partition (var);
if (!MTAG_P (var) && !DECL_CALL_CLOBBERED (var))
{
error ("variable in call_clobbered_vars but not marked DECL_CALL_CLOBBERED");
error ("variable in call_clobbered_vars but not marked "
"DECL_CALL_CLOBBERED");
debug_variable (var);
goto err;
}
}
FOR_EACH_REFERENCED_VAR (var, rvi)
{
if (!MTAG_P (var) && DECL_CALL_CLOBBERED (var)
if (is_gimple_reg (var))
continue;
if (memory_partition (var))
var = memory_partition (var);
if (!MTAG_P (var)
&& DECL_CALL_CLOBBERED (var)
&& !bitmap_bit_p (gimple_call_clobbered_vars (cfun), DECL_UID (var)))
{
error ("variable marked DECL_CALL_CLOBBERED but not in call_clobbered_vars bitmap.");
error ("variable marked DECL_CALL_CLOBBERED but not in "
"call_clobbered_vars bitmap.");
debug_variable (var);
goto err;
}
}
return;
err:
@ -606,6 +632,7 @@ verify_ssa (bool check_modified_stmt)
{
if (verify_phi_args (phi, bb, definition_block))
goto err;
bitmap_set_bit (names_defined_in_bb,
SSA_NAME_VERSION (PHI_RESULT (phi)));
}
@ -618,7 +645,7 @@ verify_ssa (bool check_modified_stmt)
if (check_modified_stmt && stmt_modified_p (stmt))
{
error ("stmt (%p) marked modified after optimization pass : ",
error ("stmt (%p) marked modified after optimization pass: ",
(void *)stmt);
print_generic_stmt (stderr, stmt, TDF_VOPS);
goto err;
@ -633,23 +660,42 @@ verify_ssa (bool check_modified_stmt)
base_address = get_base_address (lhs);
if (base_address
&& gimple_aliases_computed_p (cfun)
&& SSA_VAR_P (base_address)
&& ZERO_SSA_OPERANDS (stmt, SSA_OP_VMAYDEF|SSA_OP_VMUSTDEF))
&& !stmt_ann (stmt)->has_volatile_ops
&& ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
{
error ("statement makes a memory store, but has no "
"V_MAY_DEFS nor V_MUST_DEFS");
error ("statement makes a memory store, but has no VDEFS");
print_generic_stmt (stderr, stmt, TDF_VOPS);
goto err;
}
}
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
SSA_OP_ALL_USES | SSA_OP_ALL_KILLS)
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_VIRTUALS)
{
if (verify_ssa_name (op, true))
{
error ("in statement");
print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
goto err;
}
}
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE|SSA_OP_DEF)
{
if (verify_ssa_name (op, false))
{
error ("in statement");
print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
goto err;
}
}
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
{
op = USE_FROM_PTR (use_p);
if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
use_p, stmt, false, !is_gimple_reg (op),
names_defined_in_bb))
use_p, stmt, false, names_defined_in_bb))
goto err;
}
@ -661,7 +707,8 @@ verify_ssa (bool check_modified_stmt)
}
/* Finally, verify alias information. */
verify_alias_info ();
if (gimple_aliases_computed_p (cfun))
verify_alias_info ();
free (definition_block);
@ -774,6 +821,7 @@ delete_tree_ssa (void)
cfun->gimple_df->addressable_vars = NULL;
cfun->gimple_df->modified_noreturn_calls = NULL;
cfun->gimple_df->aliases_computed_p = false;
delete_alias_heapvars ();
gcc_assert (!need_ssa_update_p ());
}
@ -882,18 +930,6 @@ tree_ssa_useless_type_conversion (tree expr)
return false;
}
/* Returns true if statement STMT may read memory. */
bool
stmt_references_memory_p (tree stmt)
{
stmt_ann_t ann = stmt_ann (stmt);
if (ann->has_volatile_ops)
return true;
return (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS));
}
/* Internal helper for walk_use_def_chains. VAR, FN and DATA are as
described in walk_use_def_chains.
@ -940,7 +976,10 @@ walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
{
tree arg = PHI_ARG_DEF (def_stmt, i);
if (TREE_CODE (arg) == SSA_NAME
/* ARG may be NULL for newly introduced PHI nodes. */
if (arg
&& TREE_CODE (arg) == SSA_NAME
&& walk_use_def_chains_1 (arg, fn, data, visited, is_dfs))
return true;
}
@ -978,7 +1017,6 @@ walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
If IS_DFS is false, the two steps above are done in reverse order
(i.e., a breadth-first search). */
void
walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
bool is_dfs)
@ -1189,4 +1227,3 @@ struct tree_opt_pass pass_late_warn_uninitialized =
0, /* todo_flags_finish */
0 /* letter */
};

@ -301,7 +301,7 @@ vect_create_data_ref_ptr (tree stmt,
if (!MTAG_P (tag))
new_type_alias (vect_ptr, tag, DR_REF (dr));
else
var_ann (vect_ptr)->symbol_mem_tag = tag;
set_symbol_mem_tag (vect_ptr, tag);
var_ann (vect_ptr)->subvars = DR_SUBVARS (dr);
@ -1660,7 +1660,7 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
return false;
}
gcc_assert (!stmt_references_memory_p (stmt));
gcc_assert (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS));
for (args = TREE_OPERAND (operation, 1); args; args = TREE_CHAIN (args))
{
@ -2851,16 +2851,16 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
vec_oprnd);
vect_finish_stmt_generation (stmt, new_stmt, bsi);
/* Set the V_MAY_DEFS for the vector pointer. If this virtual def has a
use outside the loop and a loop peel is performed then the def may be
renamed by the peel. Mark it for renaming so the later use will also
be renamed. */
/* Set the VDEFs for the vector pointer. If this virtual def
has a use outside the loop and a loop peel is performed
then the def may be renamed by the peel. Mark it for
renaming so the later use will also be renamed. */
copy_virtual_operands (new_stmt, next_stmt);
if (j == 0)
{
/* The original store is deleted so the same SSA_NAMEs can be used.
*/
FOR_EACH_SSA_TREE_OPERAND (def, next_stmt, iter, SSA_OP_VMAYDEF)
/* The original store is deleted so the same SSA_NAMEs
can be used. */
FOR_EACH_SSA_TREE_OPERAND (def, next_stmt, iter, SSA_OP_VDEF)
{
SSA_NAME_DEF_STMT (def) = new_stmt;
mark_sym_for_renaming (SSA_NAME_VAR (def));
@ -2872,7 +2872,7 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
{
/* Create new names for all the definitions created by COPY and
add replacement mappings for each new name. */
FOR_EACH_SSA_DEF_OPERAND (def_p, new_stmt, iter, SSA_OP_VMAYDEF)
FOR_EACH_SSA_DEF_OPERAND (def_p, new_stmt, iter, SSA_OP_VDEF)
{
create_new_def_for (DEF_FROM_PTR (def_p), new_stmt, def_p);
mark_sym_for_renaming (SSA_NAME_VAR (DEF_FROM_PTR (def_p)));
@ -4037,9 +4037,9 @@ vect_generate_tmps_on_preheader (loop_vec_info loop_vinfo,
LOOP - the loop whose preheader will contain STMT.
It's possible to vectorize a loop even though an SSA_NAME from a VUSE
appears to be defined in a V_MAY_DEF in another statement in a loop.
appears to be defined in a VDEF in another statement in a loop.
One such case is when the VUSE is at the dereference of a __restricted__
pointer in a load and the V_MAY_DEF is at the dereference of a different
pointer in a load and the VDEF is at the dereference of a different
__restricted__ pointer in a store. Vectorization may result in
copy_virtual_uses being called to copy the problematic VUSE to a new
statement that is being inserted in the loop preheader. This procedure
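The situation described in the comment above can come from a loop such as the following, where the VUSE at the load through one __restrict pointer is an SSA name defined by the VDEF of the store through the other (sketch only):

void
copy_loop (int *__restrict dst, int *__restrict src, int n)
{
  int i;
  for (i = 0; i < n; i++)
    /* The VUSE of the load from SRC may be an SSA name defined by the
       VDEF of the store through DST, even though the two __restrict
       pointers cannot alias each other.  */
    dst[i] = src[i];
}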
@ -4651,8 +4651,6 @@ vect_transform_loop (loop_vec_info loop_vinfo)
int i;
tree ratio = NULL;
int vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
bitmap_iterator bi;
unsigned int j;
bool strided_store;
if (vect_print_dump_info (REPORT_DETAILS))
@ -4715,7 +4713,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
/* CHECKME: we wouldn't need this if we called update_ssa once
for all loops. */
bitmap_zero (vect_vnames_to_rename);
bitmap_zero (vect_memsyms_to_rename);
/* Peel the loop if there are data refs with unknown alignment.
Only one data ref with unknown store is allowed. */
@ -4837,8 +4835,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
slpeel_make_loop_iterate_ntimes (loop, ratio);
EXECUTE_IF_SET_IN_BITMAP (vect_vnames_to_rename, 0, j, bi)
mark_sym_for_renaming (SSA_NAME_VAR (ssa_name (j)));
mark_set_for_renaming (vect_memsyms_to_rename);
/* The memory tags and pointers in vectorized statements need to
have their SSA forms updated. FIXME, why can't this be delayed

@ -178,7 +178,7 @@ enum verbosity_levels vect_verbosity_level = MAX_VERBOSITY_LEVEL;
static LOC vect_loop_location;
/* Bitmap of virtual variables to be renamed. */
bitmap vect_vnames_to_rename;
bitmap vect_memsyms_to_rename;
/*************************************************************************
Simple Loop Peeling Utilities
@ -226,8 +226,7 @@ rename_variables_in_bb (basic_block bb)
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
{
stmt = bsi_stmt (bsi);
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
(SSA_OP_ALL_USES | SSA_OP_ALL_KILLS))
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
rename_use_op (use_p);
}
@ -529,7 +528,7 @@ slpeel_update_phi_nodes_for_guard1 (edge guard_edge, struct loop *loop,
renaming later. */
name = PHI_RESULT (orig_phi);
if (!is_gimple_reg (SSA_NAME_VAR (name)))
bitmap_set_bit (vect_vnames_to_rename, SSA_NAME_VERSION (name));
bitmap_set_bit (vect_memsyms_to_rename, DECL_UID (SSA_NAME_VAR (name)));
/** 1. Handle new-merge-point phis **/
@ -554,6 +553,9 @@ slpeel_update_phi_nodes_for_guard1 (edge guard_edge, struct loop *loop,
/** 2. Handle loop-closed-ssa-form phis **/
if (!is_gimple_reg (PHI_RESULT (orig_phi)))
continue;
/* 2.1. Generate new phi node in NEW_EXIT_BB: */
new_phi = create_phi_node (SSA_NAME_VAR (PHI_RESULT (orig_phi)),
*new_exit_bb);
@ -2163,7 +2165,7 @@ vectorize_loops (void)
/* Allocate the bitmap that records which virtual variables that
need to be renamed. */
vect_vnames_to_rename = BITMAP_ALLOC (NULL);
vect_memsyms_to_rename = BITMAP_ALLOC (NULL);
/* ----------- Analyze loops. ----------- */
@ -2193,7 +2195,7 @@ vectorize_loops (void)
/* ----------- Finalize. ----------- */
BITMAP_FREE (vect_vnames_to_rename);
BITMAP_FREE (vect_memsyms_to_rename);
for (i = 1; i < vect_loops_num; i++)
{

@ -328,7 +328,7 @@ extern FILE *vect_dump;
extern enum verbosity_levels vect_verbosity_level;
/* Bitmap of virtual variables to be renamed. */
extern bitmap vect_vnames_to_rename;
extern bitmap vect_memsyms_to_rename;
/*-----------------------------------------------------------------*/
/* Function prototypes. */

@ -271,12 +271,15 @@ init_ttree (void)
tree_contains_struct[STRUCT_FIELD_TAG][TS_DECL_MINIMAL] = 1;
tree_contains_struct[NAME_MEMORY_TAG][TS_DECL_MINIMAL] = 1;
tree_contains_struct[SYMBOL_MEMORY_TAG][TS_DECL_MINIMAL] = 1;
tree_contains_struct[MEMORY_PARTITION_TAG][TS_DECL_MINIMAL] = 1;
tree_contains_struct[STRUCT_FIELD_TAG][TS_MEMORY_TAG] = 1;
tree_contains_struct[NAME_MEMORY_TAG][TS_MEMORY_TAG] = 1;
tree_contains_struct[SYMBOL_MEMORY_TAG][TS_MEMORY_TAG] = 1;
tree_contains_struct[MEMORY_PARTITION_TAG][TS_MEMORY_TAG] = 1;
tree_contains_struct[STRUCT_FIELD_TAG][TS_STRUCT_FIELD_TAG] = 1;
tree_contains_struct[MEMORY_PARTITION_TAG][TS_MEMORY_PARTITION_TAG] = 1;
tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS] = 1;
tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS] = 1;
@ -374,6 +377,8 @@ tree_code_size (enum tree_code code)
return sizeof (struct tree_memory_tag);
case STRUCT_FIELD_TAG:
return sizeof (struct tree_struct_field_tag);
case MEMORY_PARTITION_TAG:
return sizeof (struct tree_memory_partition_tag);
default:
return sizeof (struct tree_decl_non_common);
}
@ -2189,6 +2194,7 @@ tree_node_structure (tree t)
case SYMBOL_MEMORY_TAG:
case NAME_MEMORY_TAG:
case STRUCT_FIELD_TAG:
case MEMORY_PARTITION_TAG:
return TS_MEMORY_TAG;
default:
return TS_DECL_NON_COMMON;

@ -359,6 +359,7 @@ DEFTREECODE (RESULT_DECL, "result_decl", tcc_declaration, 0)
DEFTREECODE (STRUCT_FIELD_TAG, "struct_field_tag", tcc_declaration, 0)
DEFTREECODE (NAME_MEMORY_TAG, "name_memory_tag", tcc_declaration, 0)
DEFTREECODE (SYMBOL_MEMORY_TAG, "symbol_memory_tag", tcc_declaration, 0)
DEFTREECODE (MEMORY_PARTITION_TAG, "memory_partition_tag", tcc_declaration, 0)
/* A namespace declaration. Namespaces appear in DECL_CONTEXT of other
_DECLs, providing a hierarchy of names. */

@ -107,7 +107,8 @@ extern const enum tree_code_class tree_code_type[];
#define MTAG_P(CODE) \
(TREE_CODE (CODE) == STRUCT_FIELD_TAG \
|| TREE_CODE (CODE) == NAME_MEMORY_TAG \
|| TREE_CODE (CODE) == SYMBOL_MEMORY_TAG)
|| TREE_CODE (CODE) == SYMBOL_MEMORY_TAG \
|| TREE_CODE (CODE) == MEMORY_PARTITION_TAG)
/* Nonzero if DECL represents a VAR_DECL or FUNCTION_DECL. */
@ -1859,13 +1860,14 @@ struct tree_phi_node GTY(())
int num_args;
int capacity;
/* Basic block to that the phi node belongs. */
/* Basic block holding this PHI node. */
struct basic_block_def *bb;
/* Arguments of the PHI node. These are maintained in the same
order as predecessor edge vector BB->PREDS. */
struct phi_arg_d GTY ((length ("((tree)&%h)->phi.num_args"))) a[1];
};
#define OMP_CLAUSE_CODE(NODE) \
(OMP_CLAUSE_CHECK (NODE))->omp_clause.code
@ -2443,6 +2445,20 @@ struct tree_struct_field_tag GTY(())
#define SFT_OFFSET(NODE) (STRUCT_FIELD_TAG_CHECK (NODE)->sft.offset)
#define SFT_SIZE(NODE) (STRUCT_FIELD_TAG_CHECK (NODE)->sft.size)
/* Memory Partition Tags (MPTs) group memory symbols under one
common name for the purposes of placing memory PHI nodes. */
struct tree_memory_partition_tag GTY(())
{
struct tree_memory_tag common;
/* Set of symbols grouped under this MPT. */
bitmap symbols;
};
#define MPT_SYMBOLS(NODE) (MEMORY_PARTITION_TAG_CHECK (NODE)->mpt.symbols)
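A source-level way to picture MPTs: if the store below may alias the three globals, an unpartitioned representation needs one virtual definition per symbol, whereas grouping { a, b, c } under a single partition tag lets the store carry one VDEF of that tag, and one PHI node per join point covers the whole group (the grouping shown is hypothetical):

int a, b, c;

void
clobber (int *p)
{
  /* If *P may alias A, B and C, this store would otherwise need a
     virtual definition for each of the three symbols.  With A, B and C
     grouped under one memory partition tag, a single VDEF of that tag
     is enough, and PHI placement only has to consider the tag.  */
  *p = 0;
}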
/* For any sort of a ..._DECL node, this points to the original (abstract)
decl node which this decl is an instance of, or else it is NULL indicating
that this decl is not an instance of some other decl. For example,
@ -3264,6 +3280,7 @@ union tree_node GTY ((ptr_alias (union lang_tree_node),
struct tree_memory_tag GTY ((tag ("TS_MEMORY_TAG"))) mtag;
struct tree_struct_field_tag GTY ((tag ("TS_STRUCT_FIELD_TAG"))) sft;
struct tree_omp_clause GTY ((tag ("TS_OMP_CLAUSE"))) omp_clause;
struct tree_memory_partition_tag GTY ((tag ("TS_MEMORY_PARTITION_TAG"))) mpt;
};
/* Standard named or nameless data types of the C compiler. */

@ -64,3 +64,4 @@ DEFTREESTRUCT(TS_CONSTRUCTOR, "constructor")
DEFTREESTRUCT(TS_MEMORY_TAG, "memory tag")
DEFTREESTRUCT(TS_STRUCT_FIELD_TAG, "struct field tag")
DEFTREESTRUCT(TS_OMP_CLAUSE, "omp clause")
DEFTREESTRUCT(TS_MEMORY_PARTITION_TAG, "memory partition tag")