re PR middle-end/13146 (inheritance for nonoverlapping_component_refs_p)

2009-04-03  Richard Guenther  <rguenther@suse.de>

	PR middle-end/13146
	PR tree-optimization/23940
	PR tree-optimization/33237
	PR middle-end/33974
	PR middle-end/34093
	PR tree-optimization/36201
	PR tree-optimization/36230
	PR tree-optimization/38049
	PR tree-optimization/38207
	PR tree-optimization/38230
	PR tree-optimization/38301
	PR tree-optimization/38585
	PR middle-end/38895
	PR tree-optimization/38985
	PR tree-optimization/39299
	* tree-ssa-structalias.h: Remove.
	* tree-ssa-operands.h (NULL_USE_OPERAND_P): Make of type use_operand_p.
	(NULL_DEF_OPERAND_P): Make of type def_operand_p.
	(struct vuse_element_d): Remove.
	(struct vuse_vec_d): Likewise.
	(VUSE_VECT_NUM_ELEM, VUSE_VECT_ELEMENT_NC, VUSE_ELEMENT_PTR_NC,
	VUSE_ELEMENT_VAR_NC, VUSE_VECT_ELEMENT, VUSE_ELEMENT_PTR,
	SET_VUSE_VECT_ELEMENT, SET_VUSE_ELEMENT_VAR, SET_VUSE_ELEMENT_PTR,
	VUSE_ELEMENT_VAR): Likewise.
	(struct voptype_d): Likewise.
	(NUM_VOP_FREE_BUCKETS): Likewise.
	(struct ssa_operands): Remove vop_free_buckets and mpt_table fields.
	(struct stmt_operands_d): Remove.
	(VUSE_OP_PTR, VUSE_OP, SET_VUSE_OP, VUSE_NUM, VUSE_VECT,
	VDEF_RESULT_PTR, VDEF_RESULT, VDEF_OP_PTR, VDEF_OP, SET_VDEF_OP,
	VDEF_NUM, VDEF_VECT): Likewise.
	(copy_virtual_operands): Remove.
	(operand_build_cmp): Likewise.
	(create_ssa_artificial_load_stmt): Likewise.
	(enum ssa_op_iter_type): Remove ssa_op_iter_vdef.
	(struct ssa_operand_iterator_d): Remove vuses, vdefs, mayuses,
	vuse_index and mayuse_index members.  Pack and move done and iter_type
	members to the front.
	(SSA_OP_VMAYUSE): Remove.
	(SSA_OP_VIRTUAL_USES): Adjust.
	(FOR_EACH_SSA_VDEF_OPERAND): Remove.
	(unlink_stmt_vdef): Declare.
	(add_to_addressable_set): Remove.
	* tree-vrp.c (stmt_interesting_for_vrp): Adjust.
	(vrp_visit_stmt): Likewise.
	* doc/tree-ssa.texi (Alias analysis): Update.
	* doc/invoke.texi (max-aliased-vops): Remove docs.
	(avg-aliased-vops): Likewise.
	* tree-into-ssa.c (syms_to_rename): Remove.
	(need_to_update_vops_p): Likewise.
	(need_to_initialize_update_ssa_p): Rename to ...
	(update_ssa_initialized_fn): ... this.  Track function we are
	initialized for.
	(symbol_marked_for_renaming): Simplify.
	(add_new_name_mapping): Do not set need_to_update_vops_p.
	(dump_currdefs): Use SYMS_TO_RENAME.
	(rewrite_update_stmt): Always walk all uses/defs.
	(dump_update_ssa): Adjust.
	(init_update_ssa): Take function argument.  Track what we are
	initialized for.
	(delete_update_ssa): Reset SYMS_TO_RENAME and update_ssa_initialized_fn.
	(create_new_def_for): Initialize for cfun, assert we are initialized
	for cfun.
	(mark_sym_for_renaming): Simplify.
	(mark_set_for_renaming): Do not initialize update-ssa.
	(need_ssa_update_p): Simplify.  Take function argument.
	(name_mappings_registered_p): Assert we ask for the correct function.
	(name_registered_for_update_p): Likewise.
	(ssa_names_to_replace): Likewise.
	(release_ssa_name_after_update_ssa): Likewise.
	(update_ssa): Likewise.  Use SYMS_TO_RENAME.
	(dump_decl_set): Do not print a newline.
	(debug_decl_set): Do it here.
	(dump_update_ssa): And here.
	* tree-ssa-loop-im.c (move_computations): Adjust.
	(movement_possibility): Likewise.
	(determine_max_movement): Likewise.
	(gather_mem_refs_stmt): Likewise.
	* tree-dump.c (dequeue_and_dump): Do not handle SYMBOL_MEMORY_TAG
	or NAME_MEMORY_TAG.
	* tree-complex.c (update_all_vops): Remove.
	(expand_complex_move): Adjust.
	* tree-ssa-loop-niter.c (chain_of_csts_start): Use NULL_TREE.
	Simplify test for memory referencing statement.  Exclude
	non-invariant ADDR_EXPRs.
	* tree-pretty-print.c (dump_generic_node): Do not handle memory tags.
	* tree-loop-distribution.c (generate_memset_zero): Adjust.
	(rdg_flag_uses): Likewise.
	* tree-tailcall.c (suitable_for_tail_opt_p): Remove memory-tag
	related code.
	(tree_optimize_tail_calls_1): Also split the
	edge from the entry block if we have degenerate PHI nodes in
	the first basic block.
	* tree.c (init_ttree): Remove memory-tag related code.
	(tree_code_size): Likewise.
	(tree_node_structure): Likewise.
	(build7_stat): Re-write to be build6_stat.
	* tree.h (MTAG_P, TREE_MEMORY_TAG_CHECK, TMR_TAG): Remove.
	(SSA_VAR_P): Adjust.
	(struct tree_memory_tag): Remove.
	(struct tree_memory_partition_tag): Likewise.
	(union tree_node): Adjust.
	(build7): Re-write to be build6.
	* tree-pass.h (pass_reset_cc_flags): Remove.
	(TODO_update_address_taken): New flag.
	(pass_simple_dse): Remove.
	* ipa-cp.c (ipcp_update_callgraph): Update SSA form.
	* params.h (MAX_ALIASED_VOPS): Remove.
	(AVG_ALIASED_VOPS): Likewise.
	* omp-low.c (expand_omp_taskreg): Update SSA form.
	* tree-ssa-dse.c (dse_optimize_stmt): Properly query if the rhs
	aliases the lhs in a copy stmt.
	* tree-ssa-dse.c (struct address_walk_data): Remove.
	(memory_ssa_name_same): Likewise.
	(memory_address_same): Likewise.
	(get_kill_of_stmt_lhs): Likewise.
	(dse_possible_dead_store_p): Simplify, use the oracle.  Handle
	unused stores.  Look through PHI nodes into post-dominated regions.
	(dse_optimize_stmt): Simplify.  Properly remove stores.
	(tree_ssa_dse): Compute dominators.
	(execute_simple_dse): Remove.
	(pass_simple_dse): Likewise.
	* ipa-reference.c (scan_stmt_for_static_refs): Open-code
	gimple_loaded_syms and gimple_stored_syms computation.
	* toplev.c (dump_memory_report): Dump alias and pta stats.
	* tree-ssa-sccvn.c (vn_reference_compute_hash): Simplify.
	(vn_reference_eq): Likewise.
	(vuses_to_vec, copy_vuses_from_stmt, vdefs_to_vec,
	copy_vdefs_from_stmt, shared_lookup_vops, shared_vuses_from_stmt,
	valueize_vuses): Remove.
	(get_def_ref_stmt_vuses): Simplify.  Rename to ...
	(get_def_ref_stmt_vuse): ... this.
	(vn_reference_lookup_2): New function.
	(vn_reference_lookup_pieces): Use walk_non_aliased_vuses for
	walking equivalent vuses.  Simplify.
	(vn_reference_lookup): Likewise.
	(vn_reference_insert): Likewise.
	(vn_reference_insert_pieces): Likewise.
	(visit_reference_op_call): Simplify.
	(visit_reference_op_load): Likewise.
	(visit_reference_op_store): Likewise.
	(init_scc_vn): Remove shared_lookup_vuses initialization.
	(free_scc_vn): Remove shared_lookup_vuses freeing.
	(sort_vuses, sort_vuses_heap): Remove.
	(get_ref_from_reference_ops): Export.
	* tree-ssa-sccvn.h (struct vn_reference_s): Replace vuses
	vector with single vuse pointer.
	(vn_reference_lookup_pieces, vn_reference_lookup,
	vn_reference_insert, vn_reference_insert_pieces): Adjust prototypes.
	(shared_vuses_from_stmt): Remove.
	(get_ref_from_reference_ops): Declare.
	* tree-ssa-loop-manip.c (slpeel_can_duplicate_loop_p): Adjust.
	* tree-ssa-copyrename.c (copy_rename_partition_coalesce): Remove
	memory-tag related code.
	* tree-ssa-ccp.c (get_symbol_constant_value): Remove memory-tag code.
	(likely_value): Add comment, skip static-chain of call statements.
	(surely_varying_stmt_p): Adjust.
	(gimplify_and_update_call_from_tree): Likewise.
	(execute_fold_all_builtins): Do not rebuild alias info.
	(gimplify_and_update_call_from_tree): Properly update VOPs.
	* tree-ssa-loop-ivopts.c (get_ref_tag): Remove.
	(copy_ref_info): Remove memory-tag related code.
	* tree-call-cdce.c (tree_call_cdce): Rename the VOP.
	* ipa-pure-const.c (check_decl): Remove memory-tag related code.
	(check_stmt): Open-code gimple_loaded_syms and gimple_stored_syms
	computation.
	* tree-ssa-dom.c (gimple_p): Remove typedef.
	(eliminate_redundant_computations): Adjust.
	(record_equivalences_from_stmt): Likewise.
	(avail_expr_hash): Likewise.
	(avail_expr_eq): Likewise.
	* tree-ssa-propagate.c (update_call_from_tree): Properly
	update VOPs.
	(stmt_makes_single_load): Likewise.
	(stmt_makes_single_store): Likewise.
	* tree-ssa-alias.c: Rewrite completely.
	(debug_memory_partitions, dump_mem_ref_stats, debug_mem_ref_stats,
	debug_mem_sym_stats, dump_mem_sym_stats_for_var,
	debug_all_mem_sym_stats, debug_mp_info, update_mem_sym_stats_from_stmt,
	delete_mem_ref_stats, create_tag_raw, dump_points_to_info,
	dump_may_aliases_for, debug_may_aliases_for, new_type_alias):
	Remove public functions.
	(pass_reset_cc_flags): Remove.
	(pass_build_alias): Move ...
	* tree-ssa-structalias.c (pass_build_alias): ... here.
	* tree-ssa-alias.c (may_be_aliased): Move ...
	* tree-flow-inline.h (may_be_aliased): ... here.
	* tree-ssa-alias.c (struct count_ptr_d, count_ptr_derefs,
	count_uses_and_derefs): Move ...
	* gimple.c: ... here.
	* gimple.h (count_uses_and_derefs): Declare.
	* tree-ssa-alias.c (dump_alias_stats, ptr_deref_may_alias_global_p,
	ptr_deref_may_alias_decl_p, ptr_derefs_may_alias_p,
	same_type_for_tbaa, nonaliasing_component_refs_p, decl_refs_may_alias_p,
	indirect_ref_may_alias_decl_p, indirect_refs_may_alias_p,
	ref_maybe_used_by_call_p, ref_maybe_used_by_stmt_p,
	call_may_clobber_ref_p, stmt_may_clobber_ref_p, maybe_skip_until,
	get_continuation_for_phi, walk_non_aliased_vuses, walk_aliased_vdefs):
	New functions.
	* tree-dfa.c (refs_may_alias_p): Move ...
	* tree-ssa-alias.c (refs_may_alias_p): ... here.  Extend.
	* tree-ssa-alias.h: New file.
	* tree-ssa-sink.c (is_hidden_global_store): Adjust.
	(statement_sink_location): Likewise.
	* opts.c (decode_options): Do not adjust max-aliased-vops or
	avg-aliased-vops values.
	* timevar.def (TV_TREE_MAY_ALIAS): Remove.
	(TV_CALL_CLOBBER): Likewise.
	(TV_FLOW_SENSITIVE): Likewise.
	(TV_FLOW_INSENSITIVE): Likewise.
	(TV_MEMORY_PARTITIONING): Likewise.
	(TV_ALIAS_STMT_WALK): New timevar.
	* tree-ssa-loop-ivcanon.c (empty_loop_p): Adjust.
	* tree-ssa-address.c (create_mem_ref_raw): Use build6.
	(get_address_description): Remove memory-tag related code.
	* tree-ssa-ifcombine.c (bb_no_side_effects_p): Adjust.
	* treestruct.def (TS_MEMORY_TAG, TS_MEMORY_PARTITION_TAG): Remove.
	* tree-eh.c (cleanup_empty_eh): Do not leave stale SSA_NAMEs
	and immediate uses in statements.  Document.
	* gimple-pretty-print.c (dump_gimple_mem_ops): Adjust.
	(dump_symbols): Remove.
	(dump_gimple_mem_ops): Do not dump loaded or stored syms.
	* alias.c (get_deref_alias_set): New function split out from ...
	(get_alias_set): ... here.
	* alias.h (get_deref_alias_set): Declare.
	* tree-vect-data-refs.c (vect_create_data_ref_ptr): Remove unused
	type parameter.  Remove restrict pointer handling.  Create a
	ref-all pointer in case type-based alias sets do not conflict.
	(vect_analyze_data_refs): Remove SMT related code.
	* tree-vect-stmts.c (vectorizable_store): Re-instantiate TBAA assert.
	(vectorizable_load): Likewise.
	* tree-data-ref.h (struct dr_alias): Remove symbol_tag field.
	(DR_SYMBOL_TAG, DR_VOPS): Remove.
	* tree-data-ref.c (dr_may_alias_p): Use the alias-oracle.
	Ignore vops and SMTs.
	(dr_analyze_alias): Likewise.
	(free_data_ref): Likewise.
	(create_data_ref): Likewise.
	(analyze_all_data_dependences): Likewise.
	(get_references_in_stmt): Adjust.
	* tree-flow-inline.h (gimple_aliases_computed_p,
	gimple_addressable_vars, gimple_call_clobbered_vars,
	gimple_call_used_vars, gimple_global_var, may_aliases, memory_partition,
	factoring_name_p, mark_call_clobbered, clear_call_clobbered,
	compare_ssa_operands_equal, symbol_mem_tag, set_symbol_mem_tag,
	gimple_mem_ref_stats): Remove.
	(gimple_vop): New function.
	(op_iter_next_use): Remove vuses and mayuses cases.
	(op_iter_next_def): Remove vdefs case.
	(op_iter_next_tree): Remove vuses, mayuses and vdefs cases.
	(clear_and_done_ssa_iter): Do not set removed fields.
	(op_iter_init): Likewise.  Skip vuse and/or vdef if requested.
	Assert we are not iterating over vuses or vdefs if not also
	iterating over uses or defs.
	(op_iter_init_use): Likewise.
	(op_iter_init_def): Likewise.
	(op_iter_next_vdef): Remove.
	(op_iter_next_mustdef): Likewise.
	(op_iter_init_vdef): Likewise.
	(compare_ssa_operands_equal): Likewise.
	(link_use_stmts_after): Handle vuse operand.
	(is_call_used): Use is_call_clobbered.
	(is_call_clobbered): Global variables are always call clobbered,
	query the call-clobbers bitmap.
	(mark_call_clobbered): Ignore global variables.
	(clear_call_clobbered): Likewise.
	* tree-ssa-coalesce.c (create_outofssa_var_map): Adjust
	virtual operands sanity check.
	* tree.def (NAME_MEMORY_TAG, SYMBOL_MEMORY_TAG, MEMORY_PARTITION_TAG):
	Remove.
	(TARGET_MEM_REF): Remove TMR_TAG operand.
	* tree-dfa.c (add_referenced_var): Initialize call-clobber state.
	Remove call-clobber related code.
	(remove_referenced_var): Likewise.  Do not clear mpt or symbol_mem_tag.
	(dump_variable): Do not dump SMTs, memory stats, may-aliases or
	partitions or escape reason.
	(get_single_def_stmt, get_single_def_stmt_from_phi,
	get_single_def_stmt_with_phi): Remove.
	(dump_referenced_vars): Tidy.
	(get_ref_base_and_extent): Allow bare decls.
	(collect_dfa_stats): Adjust.
	* graphite.c (rename_variables_in_stmt): Adjust.
	(graphite_copy_stmts_from_block): Likewise.
	(translate_clast): Likewise.
	* tree-ssa-pre.c (struct bb_bitmap_sets): Add expr_dies bitmap.
	(EXPR_DIES): New.
	(translate_vuse_through_block): Use the oracle.
	(phi_translate_1): Adjust.
	(value_dies_in_block_x): Use the oracle.  Cache the outcome
	in EXPR_DIES.
	(valid_in_sets): Check if the VUSE for
	a REFERENCE is available.
	(eliminate): Do not remove stmts during elimination,
	instead queue and remove them afterwards.
	(do_pre): Do not rebuild alias info.
	(pass_pre): Run TODO_rebuild_alias before PRE.
	* tree-ssa-live.c (remove_unused_locals): Remove memory-tag code.
	* tree-sra.c (sra_walk_function): Use gimple_references_memory_p.
	(mark_all_v_defs_stmt): Remove.
	(mark_all_v_defs_seq): Adjust.
	(sra_replace): Likewise.
	(scalarize_use): Likewise.
	(scalarize_copy): Likewise.
	(scalarize_init): Likewise.
	(scalarize_ldst): Likewise.
	(todoflags): Remove.
	(tree_sra): Do not rebuild alias info.
	(tree_sra_early): Adjust.
	(pass_sra): Run TODO_update_address_taken before SRA.
	* tree-predcom.c (set_alias_info): Remove.
	(prepare_initializers_chain): Do not call it.
	(mark_virtual_ops_for_renaming): Adjust.
	(mark_virtual_ops_for_renaming_list): Remove.
	(initialize_root_vars): Adjust.
	(initialize_root_vars_lm): Likewise.
	(prepare_initializers_chain): Likewise.
	* tree-ssa-copy.c (may_propagate_copy): Remove memory-tag related code.
	(may_propagate_copy_into_stmt): Likewise.
	(merge_alias_info): Do nothing for now.
	(propagate_tree_value_into_stmt): Adjust.
	(stmt_may_generate_copy): Likewise.
	* tree-ssa-forwprop.c (tidy_after_forward_propagate_addr): Do
	not mark symbols for renaming.
	(forward_propagate_addr_expr): Match up push/pop_stmt_changes
	with the same statement, make sure to update the new pointed-to one.
	* tree-ssa-dce.c (eliminate_unnecessary_stmts): Do not copy
	call statements, do not mark symbols for renaming.
	(mark_operand_necessary): Dump something.
	(ref_may_be_aliased): New function.
	(mark_aliased_reaching_defs_necessary_1): New helper function.
	(mark_aliased_reaching_defs_necessary): Likewise.
	(mark_all_reaching_defs_necessary_1): Likewise.
	(mark_all_reaching_defs_necessary): Likewise.
	(propagate_necessity): Do not process virtual PHIs.  For
	non-aliased loads mark all reaching definitions as necessary.
	For aliased loads and stores mark the immediate dominating
	aliased clobbers as necessary.
	(visited): New global static.
	(perform_tree_ssa_dce): Free visited bitmap after propagating
	necessity.
	(remove_dead_phis): Perform simple dead virtual PHI removal.
	(remove_dead_stmt): Properly unlink virtual operands when
	removing stores.
	(eliminate_unnecessary_stmts): Schedule PHI removal after
	stmt removal.
	* tree-ssa-ter.c (is_replaceable_p): Adjust.
	(process_replaceable): Likewise.
	(find_replaceable_in_bb): Likewise.
	* tree-ssa.c (verify_ssa_name): Verify all VOPs are
	based on the single gimple vop.
	(verify_flow_insensitive_alias_info): Remove.
	(verify_flow_sensitive_alias_info): Likewise.
	(verify_call_clobbering): Likewise.
	(verify_memory_partitions): Likewise.
	(verify_alias_info): Likewise.
	(verify_ssa): Adjust.
	(execute_update_addresses_taken): Export.  Update SSA
	manually.  Optimize only when optimizing.  Use a local bitmap.
	(pass_update_address_taken): Remove TODO_update_ssa, add
	TODO_dump_func.
	(pass_update_address_taken): Just use TODO_update_address_taken.
	(init_tree_ssa): Do not initialize addressable_vars.
	(verify_ssa): Verify new VUSE / VDEF properties.
	Verify that all stmts definitions have the stmt as SSA_NAME_DEF_STMT.
	Do not call verify_alias_info.
	(delete_tree_ssa): Clear the VUSE, VDEF operands.
	Do not free the loaded and stored syms bitmaps.  Reset the escaped
	and callused solutions.  Do not free addressable_vars.
	Remove memory-tag related code.
	(warn_uninitialized_var): Aliases are always available.
	* tree-ssa-loop-prefetch.c (gather_memory_references): Adjust.
	* lambda-code.c (can_put_in_inner_loop): Adjust.
	(can_put_after_inner_loop): Likewise.
	(perfect_nestify): Likewise.
	* tree-vect-stmts.c (vect_stmt_relevant_p): Adjust.
	(vect_gen_widened_results_half): Remove CALL_EXPR handling.
	(vectorizable_conversion): Do not mark symbols for renaming.
	* tree-inline.c (remap_gimple_stmt): Clear VUSE/VDEF.
	(expand_call_inline): Unlink the call's virtual operands before
	replacing it.
	(tree_function_versioning): Do not call update_ssa if we are not
	updating clones.  Simplify.
	* tree-ssa-phiprop.c (phivn_valid_p): Adjust.
	(propagate_with_phi): Likewise.
	* tree-outof-ssa.c (create_temp): Remove memory tag and call
	clobber code.  Assert we are not aliased or global.
	* tree-flow.h: Include tree-ssa-alias.h.
	(enum escape_type): Remove.
	(struct mem_sym_stats_d): Likewise.
	(struct mem_ref_stats_d): Likewise.
	(struct gimple_df): Add vop member.  Remove global_var,
	call_clobbered_vars, call_used_vars, addressable_vars,
	aliases_computed_p and mem_ref_stats members.  Add syms_to_rename,
	escaped and callused members.
	(struct ptr_info_def): Remove all members, add points-to solution
	member pt.
	(struct var_ann_d): Remove in_vuse_list, in_vdef_list,
	call_clobbered, escape_mask, mpt and symbol_mem_tag members.
	* Makefile.in (TREE_FLOW_H): Add tree-ssa-alias.h.
	(tree-ssa-structalias.o): Remove tree-ssa-structalias.h.
	(tree-ssa-alias.o): Likewise.
	(toplev.o): Add tree-ssa-alias.h.
	(GTFILES): Remove tree-ssa-structalias.h, add tree-ssa-alias.h.
	* gimple.c (gimple_set_bb): Fix off-by-one error.
	(is_gimple_reg): Do not handle memory tags.
	(gimple_copy): Also copy virtual operands.
	Delay updating the statement.  Do not reset loaded and stored syms.
	(gimple_set_stored_syms): Remove.
	(gimple_set_loaded_syms): Likewise.
	(gimple_call_copy_skip_args): Copy the virtual operands
	and mark the new statement modified.
	* tree-ssa-structalias.c (may_alias_p): Remove.
	(set_uids_in_ptset): Take the alias set to prune with as
	parameter.  Fold in the alias test of may_alias_p.
	(compute_points_to_sets): Compute whether a ptr is dereferenced
	in a local sbitmap.
	(process_constraint): Deal with &ANYTHING on the lhs, reject all
	other ADDRESSOF constraints on the lhs.
	(get_constraint_for_component_ref): Assert that we don't get
	ADDRESSOF constraints from the base of the reference.
	Properly generate UNKNOWN_OFFSET for DEREF if needed.
	(struct variable_info): Remove collapsed_to member.
	(get_varinfo_fc): Remove.
	(new_var_info): Do not set collapsed_to.
	(dump_constraint): Do not follow cycles.
	(dump_constraint_graph): Likewise.
	(build_pred_graph): Likewise.
	(build_succ_graph): Likewise.
	(rewrite_constraints): Likewise.
	(do_simple_structure_copy): Remove.
	(do_rhs_deref_structure_copy): Remove.
	(do_lhs_deref_structure_copy): Remove.
	(collapse_rest_of_var): Remove.
	(do_structure_copy): Re-implement.
	(pta_stats): New global variable.
	(dump_pta_stats): New function.
	(struct constraint_expr): Make offset signed.
	(UNKNOWN_OFFSET): Define special value.
	(dump_constraint): Dump UNKNOWN_OFFSET as UNKNOWN.
	(solution_set_expand): New helper function split out from ...
	(do_sd_constraint): ... here.
	(solution_set_add): Handle UNKNOWN_OFFSET.  Handle negative offsets.
	(do_ds_constraint): Likewise.
	(do_sd_constraint): Likewise.  Do not special-case ESCAPED = *ESCAPED
	and CALLUSED = *CALLUSED.
	(set_union_with_increment): Make inc argument signed.
	(type_safe): Remove.
	(get_constraint_for_ptr_offset): Handle unknown and negative
	constant offsets.
	(first_vi_for_offset): Handle offsets before start.  Bail
	out early for offsets beyond the variable extent.
	(first_or_preceding_vi_for_offset): New function.
	(init_base_vars): Add ESCAPED = ESCAPED + UNKNOWN_OFFSET constraint.
	Together with ESCAPED = *ESCAPED this properly computes reachability.
	(find_what_var_points_to): New function.
	(find_what_p_points_to): Implement in terms of find_what_var_points_to.
	(pt_solution_reset, pt_solution_empty_p, pt_solution_includes_global,
	pt_solution_includes_1, pt_solution_includes, pt_solutions_intersect_1,
	pt_solutions_intersect): New functions.
	(compute_call_used_vars): Remove.
	(compute_may_aliases): New main entry into PTA computation.
	* gimple.h (gimple_p): New typedef.
	(struct gimple_statement_base): Remove references_memory_p.
	(struct gimple_statement_with_memory_ops_base): Remove
	vdef_ops, vuse_ops, stores and loads members.  Add vdef and vuse
	members.
	(gimple_vuse_ops, gimple_set_vuse_ops, gimple_vdef_ops,
	gimple_set_vdef_ops, gimple_loaded_syms, gimple_stored_syms,
	gimple_set_references_memory): Remove.
	(gimple_vuse_op, gimple_vdef_op, gimple_vuse, gimple_vdef,
	gimple_vuse_ptr, gimple_vdef_ptr, gimple_set_vuse, gimple_set_vdef):
	New functions.
	* tree-cfg.c (move_block_to_fn): Fix off-by-one error.
	(verify_expr): Allow RESULT_DECL.
	(gimple_duplicate_bb): Do not copy virtual operands.
	(gimple_duplicate_sese_region): Adjust.
	(gimple_duplicate_sese_tail): Likewise.
	(mark_virtual_ops_in_region): Remove.
	(move_sese_region_to_fn): Do not call it.
	* passes.c (init_optimization_passes): Remove pass_reset_cc_flags
	and pass_simple_dse.
	(execute_function_todo): Handle TODO_update_address_taken,
	call execute_update_addresses_taken for TODO_rebuild_alias.
	(execute_todo): Adjust.
	(execute_one_pass): Init dump files early.
	* ipa-struct-reorg.c (finalize_var_creation): Do not mark vars
	call-clobbered.
	(create_general_new_stmt): Clear vops.
	* tree-ssa-reassoc.c (get_rank): Adjust.
	* tree-vect-slp.c (vect_create_mask_and_perm): Do not mark
	symbols for renaming.
	* params.def (PARAM_MAX_ALIASED_VOPS): Remove.
	(PARAM_AVG_ALIASED_VOPS): Likewise.
	* tree-ssanames.c (init_ssanames): Allocate SYMS_TO_RENAME.
	(duplicate_ssa_name_ptr_info): No need to copy the shared bitmaps.
	* tree-ssa-operands.c: Simplify for new virtual operand
	representation.
	(operand_build_cmp, copy_virtual_operands,
	create_ssa_artificial_load_stmt, add_to_addressable_set,
	gimple_add_to_addresses_taken): Remove public functions.
	(unlink_stmt_vdef): New function.

	* gcc.dg/pr19633-1.c: Adjust.
	* gcc.dg/torture/pta-callused-1.c: Likewise.
	* gcc.dg/torture/pr39074-2.c: Likewise.
	* gcc.dg/torture/pr39074.c: Likewise.
	* gcc.dg/torture/pta-ptrarith-3.c: New testcase.
	* gcc.dg/torture/pr30375.c: Adjust.
	* gcc.dg/torture/pr33563.c: Likewise.
	* gcc.dg/torture/pr33870.c: Likewise.
	* gcc.dg/torture/pr33560.c: Likewise.
	* gcc.dg/torture/pta-structcopy-1.c: New testcase.
	* gcc.dg/torture/ssa-pta-fn-1.c: Likewise.
	* gcc.dg/tree-ssa/alias-15.c: Remove.
	* gcc.dg/tree-ssa/ssa-dce-4.c: New testcase.
	* gcc.dg/tree-ssa/pr26421.c: Adjust.
	* gcc.dg/tree-ssa/ssa-fre-10.c: XFAIL.
	* gcc.dg/tree-ssa/ssa-dce-5.c: New testcase.
	* gcc.dg/tree-ssa/pr23382.c: Adjust.
	* gcc.dg/tree-ssa/ssa-fre-20.c: New testcase.
	* gcc.dg/tree-ssa/alias-16.c: Adjust.
	* gcc.dg/tree-ssa/ssa-fre-13.c: Likewise.
	* gcc.dg/tree-ssa/ssa-fre-14.c: Likewise.
	* gcc.dg/tree-ssa/alias-18.c: Likewise.
	* gcc.dg/tree-ssa/ssa-fre-15.c: Likewise.
	* gcc.dg/tree-ssa/ssa-lim-3.c: Likewise.
	* gcc.dg/tree-ssa/alias-19.c: Likewise.
	* gcc.dg/tree-ssa/pta-ptrarith-1.c: New testcase.
	* gcc.dg/tree-ssa/pr13146.c: Likewise.
	* gcc.dg/tree-ssa/ssa-pre-23.c: Likewise.
	* gcc.dg/tree-ssa/pta-ptrarith-2.c: Likewise.
	* gcc.dg/tree-ssa/ssa-fre-18.c: Likewise.
	* gcc.dg/tree-ssa/ssa-pre-24.c: New XFAILed testcase.
	* gcc.dg/tree-ssa/ssa-fre-19.c: New testcase.
	* gcc.dg/tree-ssa/alias-20.c: Likewise.
	* gcc.dg/tree-ssa/ssa-dse-12.c: Likewise.
	* gcc.dg/tree-ssa/pr38895.c: Likewise.
	* gcc.dg/uninit-B.c: XFAIL.
	* gcc.dg/vect/no-vfa-vect-43.c: Adjust.
	* gcc.dg/uninit-pr19430.c: XFAIL.
	* g++.dg/tree-ssa/pr13146.C: New testcase.
	* g++.dg/opt/pr36187.C: Adjust.
	* g++.dg/torture/20090329-1.C: New testcase.

From-SVN: r145494
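
As a sketch of the representation this entry introduces (the walker below
is hypothetical and only illustrative; in-tree consumers go through the
new walk_non_aliased_vuses and walk_aliased_vdefs oracle walkers in
tree-ssa-alias.c): every statement that touches memory now carries at
most one VUSE and one VDEF, so the memory dependence web can be walked
like any other SSA use-def chain.

/* Hypothetical sketch, not part of the patch: follow the virtual
   use-def chain of STMT backwards through straight-line code.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"
#include "tree-flow.h"

static void
walk_vuse_chain (gimple stmt)
{
  tree vuse = gimple_vuse (stmt);

  while (vuse != NULL_TREE)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);

      /* The default definition (.MEM_x(D)) represents memory on
	 function entry; virtual PHI nodes merge memory states at
	 control-flow joins (a real walker recurses through them,
	 e.g. via get_continuation_for_phi).  */
      if (gimple_nop_p (def_stmt)
	  || gimple_code (def_stmt) == GIMPLE_PHI)
	break;

      /* Every intervening memory statement chains its single VDEF
	 to its single VUSE.  */
      vuse = gimple_vuse (def_stmt);
    }
}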

--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
@@ -856,7 +856,8 @@ CPP_INTERNAL_H = $(srcdir)/../libcpp/internal.h $(CPP_ID_DATA_H)
 TREE_DUMP_H = tree-dump.h $(SPLAY_TREE_H) tree-pass.h
 TREE_FLOW_H = tree-flow.h tree-flow-inline.h tree-ssa-operands.h \
 	$(BITMAP_H) $(BASIC_BLOCK_H) hard-reg-set.h $(GIMPLE_H) \
-	$(HASHTAB_H) $(CGRAPH_H) $(IPA_REFERENCE_H)
+	$(HASHTAB_H) $(CGRAPH_H) $(IPA_REFERENCE_H) \
+	tree-ssa-alias.h
 TREE_SSA_LIVE_H = tree-ssa-live.h $(PARTITION_H) vecprim.h
 PRETTY_PRINT_H = pretty-print.h $(INPUT_H) $(OBSTACK_H)
 DIAGNOSTIC_H = diagnostic.h diagnostic.def $(PRETTY_PRINT_H) options.h
@@ -2069,7 +2070,7 @@ stor-layout.o : stor-layout.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
 	$(TREE_H) $(PARAMS_H) $(FLAGS_H) $(FUNCTION_H) $(EXPR_H) output.h $(RTL_H) \
 	$(GGC_H) $(TM_P_H) $(TARGET_H) langhooks.h $(REGS_H) gt-stor-layout.h \
 	$(TOPLEV_H)
-tree-ssa-structalias.o: tree-ssa-structalias.c tree-ssa-structalias.h \
+tree-ssa-structalias.o: tree-ssa-structalias.c \
 	$(SYSTEM_H) $(CONFIG_H) coretypes.h $(TM_H) $(GGC_H) $(OBSTACK_H) $(BITMAP_H) \
 	$(FLAGS_H) $(RTL_H) $(TM_P_H) hard-reg-set.h $(BASIC_BLOCK_H) output.h \
 	$(DIAGNOSTIC_H) $(TREE_H) $(C_COMMON_H) $(TREE_FLOW_H) $(TREE_INLINE_H) varray.h \
@@ -2291,7 +2292,7 @@ tree-ssa-alias.o : tree-ssa-alias.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
 	$(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) $(TREE_INLINE_H) $(FLAGS_H) \
 	$(FUNCTION_H) $(TIMEVAR_H) convert.h $(TM_H) coretypes.h langhooks.h \
 	$(TREE_DUMP_H) tree-pass.h $(PARAMS_H) $(BASIC_BLOCK_H) $(DIAGNOSTIC_H) \
-	hard-reg-set.h $(GIMPLE_H) vec.h tree-ssa-structalias.h \
+	hard-reg-set.h $(GIMPLE_H) vec.h \
 	$(IPA_TYPE_ESCAPE_H) vecprim.h pointer-set.h alloc-pool.h
 tree-ssa-reassoc.o : tree-ssa-reassoc.c $(TREE_FLOW_H) $(CONFIG_H) \
 	$(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) \
@@ -2442,7 +2443,8 @@ toplev.o : toplev.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
 	value-prof.h $(PARAMS_H) $(TM_P_H) reload.h ira.h dwarf2asm.h $(TARGET_H) \
 	langhooks.h insn-flags.h $(CFGLAYOUT_H) $(CFGLOOP_H) hosthooks.h \
 	$(CGRAPH_H) $(COVERAGE_H) alloc-pool.h $(GGC_H) $(INTEGRATE_H) \
-	opts.h params.def tree-mudflap.h $(REAL_H) tree-pass.h $(GIMPLE_H)
+	opts.h params.def tree-mudflap.h $(REAL_H) tree-pass.h $(GIMPLE_H) \
+	tree-ssa-alias.h
 	$(CC) $(ALL_CFLAGS) $(ALL_CPPFLAGS) \
 	  -DTARGET_NAME=\"$(target_noncanonical)\" \
 	  -c $(srcdir)/toplev.c $(OUTPUT_OPTION)
@@ -3304,8 +3306,9 @@ GTFILES = $(CPP_ID_DATA_H) $(srcdir)/input.h $(srcdir)/coretypes.h \
 	$(srcdir)/targhooks.c $(out_file) $(srcdir)/passes.c $(srcdir)/cgraphunit.c \
 	$(srcdir)/tree-ssa-propagate.c \
 	$(srcdir)/tree-phinodes.c \
-	$(srcdir)/ipa-reference.c $(srcdir)/tree-ssa-structalias.h \
+	$(srcdir)/ipa-reference.c \
 	$(srcdir)/tree-ssa-structalias.c $(srcdir)/tree-inline.c \
+	$(srcdir)/tree-ssa-alias.h \
 	@all_gtfiles@
 
 # Compute the list of GT header files from the corresponding C sources,

--- a/gcc/alias.c
+++ b/gcc/alias.c
@@ -518,6 +518,98 @@ component_uses_parent_alias_set (const_tree t)
     }
 }
 
+/* Return the alias set for the memory pointed to by T, which may be
+   either a type or an expression.  Return -1 if there is nothing
+   special about dereferencing T.  */
+
+static alias_set_type
+get_deref_alias_set_1 (tree t)
+{
+  /* If we're not doing any alias analysis, just assume everything
+     aliases everything else.  */
+  if (!flag_strict_aliasing)
+    return 0;
+
+  if (! TYPE_P (t))
+    {
+      tree decl = find_base_decl (t);
+
+      if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
+	{
+	  /* If we haven't computed the actual alias set, do it now.  */
+	  if (DECL_POINTER_ALIAS_SET (decl) == -2)
+	    {
+	      tree pointed_to_type = TREE_TYPE (TREE_TYPE (decl));
+
+	      /* No two restricted pointers can point at the same thing.
+		 However, a restricted pointer can point at the same thing
+		 as an unrestricted pointer, if that unrestricted pointer
+		 is based on the restricted pointer.  So, we make the
+		 alias set for the restricted pointer a subset of the
+		 alias set for the type pointed to by the type of the
+		 decl.  */
+	      alias_set_type pointed_to_alias_set
+		= get_alias_set (pointed_to_type);
+
+	      if (pointed_to_alias_set == 0)
+		/* It's not legal to make a subset of alias set zero.  */
+		DECL_POINTER_ALIAS_SET (decl) = 0;
+	      else if (AGGREGATE_TYPE_P (pointed_to_type))
+		/* For an aggregate, we must treat the restricted
+		   pointer the same as an ordinary pointer.  If we
+		   were to make the type pointed to by the
+		   restricted pointer a subset of the pointed-to
+		   type, then we would believe that other subsets
+		   of the pointed-to type (such as fields of that
+		   type) do not conflict with the type pointed to
+		   by the restricted pointer.  */
+		DECL_POINTER_ALIAS_SET (decl)
+		  = pointed_to_alias_set;
+	      else
+		{
+		  DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
+		  record_alias_subset (pointed_to_alias_set,
+				       DECL_POINTER_ALIAS_SET (decl));
+		}
+	    }
+
+	  /* We use the alias set indicated in the declaration.  */
+	  return DECL_POINTER_ALIAS_SET (decl);
+	}
+
+      /* Now all we care about is the type.  */
+      t = TREE_TYPE (t);
+    }
+
+  /* If we have an INDIRECT_REF via a void pointer, we don't
+     know anything about what that might alias.  Likewise if the
+     pointer is marked that way.  */
+  if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
+      || TYPE_REF_CAN_ALIAS_ALL (t))
+    return 0;
+
+  return -1;
+}
+
+/* Return the alias set for the memory pointed to by T, which may be
+   either a type or an expression.  */
+
+alias_set_type
+get_deref_alias_set (tree t)
+{
+  alias_set_type set = get_deref_alias_set_1 (t);
+
+  /* Fall back to the alias-set of the pointed-to type.  */
+  if (set == -1)
+    {
+      if (! TYPE_P (t))
+	t = TREE_TYPE (t);
+      set = get_alias_set (TREE_TYPE (t));
+    }
+
+  return set;
+}
+
 /* Return the alias set for T, which may be either a type or an
    expression.  Call language-specific routine for help, if needed.  */
 
@@ -558,66 +650,11 @@ get_alias_set (tree t)
 	  STRIP_NOPS (inner);
 	}
 
-      /* Check for accesses through restrict-qualified pointers.  */
       if (INDIRECT_REF_P (inner))
 	{
-	  tree decl;
-
-	  if (TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME)
-	    decl = SSA_NAME_VAR (TREE_OPERAND (inner, 0));
-	  else
-	    decl = find_base_decl (TREE_OPERAND (inner, 0));
-
-	  if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
-	    {
-	      /* If we haven't computed the actual alias set, do it now.  */
-	      if (DECL_POINTER_ALIAS_SET (decl) == -2)
-		{
-		  tree pointed_to_type = TREE_TYPE (TREE_TYPE (decl));
-
-		  /* No two restricted pointers can point at the same thing.
-		     However, a restricted pointer can point at the same thing
-		     as an unrestricted pointer, if that unrestricted pointer
-		     is based on the restricted pointer.  So, we make the
-		     alias set for the restricted pointer a subset of the
-		     alias set for the type pointed to by the type of the
-		     decl.  */
-		  alias_set_type pointed_to_alias_set
-		    = get_alias_set (pointed_to_type);
-
-		  if (pointed_to_alias_set == 0)
-		    /* It's not legal to make a subset of alias set zero.  */
-		    DECL_POINTER_ALIAS_SET (decl) = 0;
-		  else if (AGGREGATE_TYPE_P (pointed_to_type))
-		    /* For an aggregate, we must treat the restricted
-		       pointer the same as an ordinary pointer.  If we
-		       were to make the type pointed to by the
-		       restricted pointer a subset of the pointed-to
-		       type, then we would believe that other subsets
-		       of the pointed-to type (such as fields of that
-		       type) do not conflict with the type pointed to
-		       by the restricted pointer.  */
-		    DECL_POINTER_ALIAS_SET (decl)
-		      = pointed_to_alias_set;
-		  else
-		    {
-		      DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
-		      record_alias_subset (pointed_to_alias_set,
-					   DECL_POINTER_ALIAS_SET (decl));
-		    }
-		}
-
-	      /* We use the alias set indicated in the declaration.  */
-	      return DECL_POINTER_ALIAS_SET (decl);
-	    }
-
-	  /* If we have an INDIRECT_REF via a void pointer, we don't
-	     know anything about what that might alias.  Likewise if the
-	     pointer is marked that way.  */
-	  else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE
-		   || (TYPE_REF_CAN_ALIAS_ALL
-		       (TREE_TYPE (TREE_OPERAND (inner, 0)))))
-	    return 0;
+	  set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
+	  if (set != -1)
+	    return set;
 	}
 
       /* Otherwise, pick up the outermost object that we could have a pointer

--- a/gcc/alias.h
+++ b/gcc/alias.h
@@ -32,6 +32,7 @@ typedef int alias_set_type;
 
 extern alias_set_type new_alias_set (void);
 extern alias_set_type get_alias_set (tree);
+extern alias_set_type get_deref_alias_set (tree);
 extern alias_set_type get_varargs_alias_set (void);
 extern alias_set_type get_frame_alias_set (void);
 extern bool component_uses_parent_alias_set (const_tree);

--- a/gcc/doc/invoke.texi
+++ b/gcc/doc/invoke.texi
@@ -7584,32 +7584,6 @@ Maximum number of basic blocks on path that cse considers.  The default is 10.
 @item max-cse-insns
 The maximum instructions CSE process before flushing.  The default is 1000.
 
-@item max-aliased-vops
-Maximum number of virtual operands per function allowed to represent
-aliases before triggering the alias partitioning heuristic.  Alias
-partitioning reduces compile times and memory consumption needed for
-aliasing at the expense of precision loss in alias information.  The
-default value for this parameter is 100 for -O1, 500 for -O2 and 1000
-for -O3.
-
-Notice that if a function contains more memory statements than the
-value of this parameter, it is not really possible to achieve this
-reduction.  In this case, the compiler will use the number of memory
-statements as the value for @option{max-aliased-vops}.
-
-@item avg-aliased-vops
-Average number of virtual operands per statement allowed to represent
-aliases before triggering the alias partitioning heuristic.  This
-works in conjunction with @option{max-aliased-vops}.  If a function
-contains more than @option{max-aliased-vops} virtual operators, then
-memory symbols will be grouped into memory partitions until either the
-total number of virtual operators is below @option{max-aliased-vops}
-or the average number of virtual operators per memory statement is
-below @option{avg-aliased-vops}.  The default value for this parameter
-is 1 for -O1 and -O2, and 3 for -O3.
-
 @item ggc-min-expand
 GCC uses a garbage collector to manage its own memory allocation.  This

--- a/gcc/doc/tree-ssa.texi
+++ b/gcc/doc/tree-ssa.texi
@ -795,230 +795,100 @@ is popped.
@cindex flow-sensitive alias analysis
@cindex flow-insensitive alias analysis
Alias analysis proceeds in 4 main phases:
Alias analysis in GIMPLE SSA form consists of two pieces.  First,
the virtual SSA web ties conflicting memory accesses together and
provides an SSA use-def chain and SSA immediate-use chains for
walking possibly dependent memory accesses.  Second, an alias-oracle
can be queried to disambiguate explicit and implicit memory references.
@enumerate
@item Structural alias analysis.
@item Memory SSA form.
This phase walks the types for structure variables, and determines which
of the fields can overlap using offset and size of each field. For each
field, a ``subvariable'' called a ``Structure field tag'' (SFT)@ is
created, which represents that field as a separate variable. All
accesses that could possibly overlap with a given field will have
virtual operands for the SFT of that field.
All statements that may use memory have exactly one accompanying use of
a virtual SSA name that represents the state of memory at the
given point in the IL.
All statements that may define memory have exactly one accompanying
definition of a virtual SSA name, using the previous state of memory
and defining the new state of memory after the given point in the IL.
@smallexample
struct foo
int i;
int foo (void)
@{
int a;
int b;
@}
struct foo temp;
int bar (void)
@{
int tmp1, tmp2, tmp3;
SFT.0_2 = VDEF <SFT.0_1>
temp.a = 5;
SFT.1_4 = VDEF <SFT.1_3>
temp.b = 6;
VUSE <SFT.1_4>
tmp1_5 = temp.b;
VUSE <SFT.0_2>
tmp2_6 = temp.a;
tmp3_7 = tmp1_5 + tmp2_6;
return tmp3_7;
# .MEM_3 = VDEF <.MEM_2(D)>
i = 1;
# VUSE <.MEM_3>
return i;
@}
@end smallexample
If you copy the symbol tag for a variable for some reason, you probably
also want to copy the subvariables for that variable.
The virtual SSA names in this case are @code{.MEM_2(D)} and
@code{.MEM_3}. The store to the global variable @code{i}
defines @code{.MEM_3} invalidating @code{.MEM_2(D)}. The
load from @code{i} uses that new state @code{.MEM_3}.
The virtual SSA web serves as constraints to SSA optimizers
preventing illegitimate code-motion and optimization. It
also provides a way to walk related memory statements.
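For instance, a pass can hop from a load to the statement that
provided its memory state (a minimal sketch; @code{load_stmt} is a
hypothetical statement):

@smallexample
tree vuse = gimple_vuse (load_stmt);
gimple def_stmt = vuse ? SSA_NAME_DEF_STMT (vuse) : NULL;
@end smallexample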
@item Points-to and escape analysis.
This phase walks the use-def chains in the SSA web looking for
three things:
Points-to analysis builds a set of constraints from the GIMPLE
SSA IL representing all pointer operations and facts we do
or do not know about pointers. Solving this set of constraints
yields a conservatively correct solution for each pointer
variable in the program (though we are only interested in
SSA name pointers) as to what it may possibly point to.
This points-to solution for a given SSA name pointer is stored
in the @code{pt_solution} sub-structure of the
@code{SSA_NAME_PTR_INFO} record. The following accessor
functions are available:
@itemize @bullet
@item Assignments of the form @code{P_i = &VAR}
@item Assignments of the form P_i = malloc()
@item Pointers and ADDR_EXPR that escape the current function.
@item @code{pt_solution_includes}
@item @code{pt_solutions_intersect}
@end itemize
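As an illustration, a client might query the solution like so (a
sketch assuming @code{ptr} is an SSA name pointer and @code{decl} a
variable; both names are hypothetical):

@smallexample
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
/* Without points-to information, conservatively assume aliasing.  */
bool may_point_to = !pi || pt_solution_includes (&pi->pt, decl);
@end smallexample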
The concept of `escaping' is the same one used in the Java world.
When a pointer or an ADDR_EXPR escapes, it means that it has been
exposed outside of the current function. So, assignment to
global variables, function arguments and returning a pointer are
all escape sites.
Points-to analysis also computes the solution for two special
sets of pointers, @code{ESCAPED} and @code{CALLUSED}.  These
represent all memory that has escaped the scope of analysis
or that is used by pure or nested const calls.
This is where we are currently limited. Since not everything is
renamed into SSA, we lose track of escape properties when a
pointer is stashed inside a field in a structure, for instance.
In those cases, we are assuming that the pointer does escape.
@item Type-based alias analysis
We use escape analysis to determine whether a variable is
call-clobbered. Simply put, if an ADDR_EXPR escapes, then the
variable is call-clobbered. If a pointer P_i escapes, then all
the variables pointed-to by P_i (and its memory tag) also escape.
@item Compute flow-sensitive aliases
We have two classes of memory tags. Memory tags associated with
the pointed-to data type of the pointers in the program. These
tags are called ``symbol memory tag'' (SMT)@. The other class are
those associated with SSA_NAMEs, called ``name memory tag'' (NMT)@.
The basic idea is that when adding operands for an INDIRECT_REF
*P_i, we will first check whether P_i has a name tag, if it does
we use it, because that will have more precise aliasing
information. Otherwise, we use the standard symbol tag.
In this phase, we go through all the pointers we found in
points-to analysis and create alias sets for the name memory tags
associated with each pointer P_i. If P_i escapes, we mark
call-clobbered the variables it points to and its tag.
@item Compute flow-insensitive aliases
This pass will compare the alias set of every symbol memory tag and
every addressable variable found in the program. Given a symbol
memory tag SMT and an addressable variable V@. If the alias sets
of SMT and V conflict (as computed by may_alias_p), then V is
marked as an alias tag and added to the alias set of SMT@.
Type-based alias analysis is frontend dependent though generic
support is provided by the middle-end in @code{alias.c}. TBAA
code is used by both tree optimizers and RTL optimizers.
Every language that wishes to perform language-specific alias analysis
should define a function that computes, given a @code{tree}
node, an alias set for the node. Nodes in different alias sets are not
allowed to alias. For an example, see the C front-end function
@code{c_get_alias_set}.
@item Tree alias-oracle
The tree alias-oracle provides means to disambiguate two memory
references and memory references against statements. The following
queries are available:
@itemize @bullet
@item @code{refs_may_alias_p}
@item @code{ref_maybe_used_by_stmt_p}
@item @code{stmt_may_clobber_ref_p}
@end itemize
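For example, to test whether a statement may read from or overwrite a
given reference (a sketch; @code{stmt} and @code{ref} are
hypothetical):

@smallexample
bool independent = !ref_maybe_used_by_stmt_p (stmt, ref)
                   && !stmt_may_clobber_ref_p (stmt, ref);
@end smallexample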
In addition to these queries, two kinds of statement walkers are
available for walking statements related to a reference REF.
@code{walk_non_aliased_vuses} walks over dominating memory-defining
statements and calls back with the non-aliased VUSE whenever a
statement does not clobber REF; the walk stops at the first
clobbering statement, or earlier if the callback requests it.
@code{walk_aliased_vdefs} walks over dominating memory-defining
statements and calls back on each statement clobbering REF,
providing its aliasing VDEF; the walk stops when the callback
requests it.
@end enumerate
For instance, consider the following function:
@smallexample
foo (int i)
@{
int *p, *q, a, b;
if (i > 10)
p = &a;
else
q = &b;
*p = 3;
*q = 5;
a = b + 2;
return *p;
@}
@end smallexample
After aliasing analysis has finished, the symbol memory tag for
pointer @code{p} will have two aliases, namely variables @code{a} and
@code{b}.
Every time pointer @code{p} is dereferenced, we want to mark the
operation as a potential reference to @code{a} and @code{b}.
@smallexample
foo (int i)
@{
int *p, a, b;
if (i_2 > 10)
p_4 = &a;
else
p_6 = &b;
# p_1 = PHI <p_4(1), p_6(2)>;
# a_7 = VDEF <a_3>;
# b_8 = VDEF <b_5>;
*p_1 = 3;
# a_9 = VDEF <a_7>
# VUSE <b_8>
a_9 = b_8 + 2;
# VUSE <a_9>;
# VUSE <b_8>;
return *p_1;
@}
@end smallexample
In certain cases, the list of may-aliases for a pointer may grow
too large.  This may cause an explosion in the number of virtual
operands inserted in the code, resulting in increased memory
consumption and compilation time.
When the number of virtual operands needed to represent aliased
loads and stores grows too large (configurable with @option{--param
max-aliased-vops}), alias sets are grouped to avoid severe
compile-time slow downs and memory consumption. The alias
grouping heuristic proceeds as follows:
@enumerate
@item Sort the list of pointers in decreasing number of contributed
virtual operands.
@item Take the first pointer from the list and reverse the role
of the memory tag and its aliases. Usually, whenever an
aliased variable Vi is found to alias with a memory tag
T, we add Vi to the may-aliases set for T@. Meaning that
after alias analysis, we will have:
@smallexample
may-aliases(T) = @{ V1, V2, V3, @dots{}, Vn @}
@end smallexample
This means that every statement that references T, will get
@code{n} virtual operands for each of the Vi tags. But, when
alias grouping is enabled, we make T an alias tag and add it
to the alias set of all the Vi variables:
@smallexample
may-aliases(V1) = @{ T @}
may-aliases(V2) = @{ T @}
@dots{}
may-aliases(Vn) = @{ T @}
@end smallexample
This has two effects: (a) statements referencing T will only get
a single virtual operand, and, (b) all the variables Vi will now
appear to alias each other. So, we lose alias precision to
improve compile time. But, in theory, a program with such a high
level of aliasing should not be very optimizable in the first
place.
@item Since variables may be in the alias set of more than one
memory tag, the grouping done in step (2) needs to be extended
to all the memory tags that have a non-empty intersection with
the may-aliases set of tag T@. For instance, if we originally
had these may-aliases sets:
@smallexample
may-aliases(T) = @{ V1, V2, V3 @}
may-aliases(R) = @{ V2, V4 @}
@end smallexample
In step (2) we would have reverted the aliases for T as:
@smallexample
may-aliases(V1) = @{ T @}
may-aliases(V2) = @{ T @}
may-aliases(V3) = @{ T @}
@end smallexample
But note that now V2 is no longer aliased with R@. We could
add R to may-aliases(V2), but we are in the process of
grouping aliases to reduce virtual operands so what we do is
add V4 to the grouping to obtain:
@smallexample
may-aliases(V1) = @{ T @}
may-aliases(V2) = @{ T @}
may-aliases(V3) = @{ T @}
may-aliases(V4) = @{ T @}
@end smallexample
@item If the total number of virtual operands due to aliasing is
still above the threshold set by @option{max-aliased-vops}, go back to (2).
@end enumerate


@ -1132,33 +1132,6 @@ dump_gimple_asm (pretty_printer *buffer, gimple gs, int spc, int flags)
}
/* Dump the set of decls SYMS. BUFFER, SPC and FLAGS are as in
dump_generic_node. */
static void
dump_symbols (pretty_printer *buffer, bitmap syms, int flags)
{
unsigned i;
bitmap_iterator bi;
if (syms == NULL)
pp_string (buffer, "NIL");
else
{
pp_string (buffer, " { ");
EXECUTE_IF_SET_IN_BITMAP (syms, 0, i, bi)
{
tree sym = referenced_var_lookup (i);
dump_generic_node (buffer, sym, 0, flags, false);
pp_character (buffer, ' ');
}
pp_character (buffer, '}');
}
}
/* Dump a PHI node PHI. BUFFER, SPC and FLAGS are as in
dump_gimple_stmt. */
@ -1379,81 +1352,27 @@ dump_gimple_cdt (pretty_printer *buffer, gimple gs, int spc, int flags)
static void
dump_gimple_mem_ops (pretty_printer *buffer, gimple gs, int spc, int flags)
{
struct voptype_d *vdefs;
struct voptype_d *vuses;
int i, n;
tree vdef = gimple_vdef (gs);
tree vuse = gimple_vuse (gs);
if (!ssa_operands_active () || !gimple_references_memory_p (gs))
return;
/* Even if the statement doesn't have virtual operators yet, it may
contain symbol information (this happens before aliases have been
computed). */
if ((flags & TDF_MEMSYMS)
&& gimple_vuse_ops (gs) == NULL
&& gimple_vdef_ops (gs) == NULL)
{
if (gimple_loaded_syms (gs))
{
pp_string (buffer, "# LOADS: ");
dump_symbols (buffer, gimple_loaded_syms (gs), flags);
newline_and_indent (buffer, spc);
}
if (gimple_stored_syms (gs))
{
pp_string (buffer, "# STORES: ");
dump_symbols (buffer, gimple_stored_syms (gs), flags);
newline_and_indent (buffer, spc);
}
return;
}
vuses = gimple_vuse_ops (gs);
while (vuses)
{
pp_string (buffer, "# VUSE <");
n = VUSE_NUM (vuses);
for (i = 0; i < n; i++)
{
dump_generic_node (buffer, VUSE_OP (vuses, i), spc + 2, flags, false);
if (i < n - 1)
pp_string (buffer, ", ");
}
pp_character (buffer, '>');
if (flags & TDF_MEMSYMS)
dump_symbols (buffer, gimple_loaded_syms (gs), flags);
newline_and_indent (buffer, spc);
vuses = vuses->next;
}
vdefs = gimple_vdef_ops (gs);
while (vdefs)
if (vdef != NULL_TREE)
{
pp_string (buffer, "# ");
dump_generic_node (buffer, VDEF_RESULT (vdefs), spc + 2, flags, false);
dump_generic_node (buffer, vdef, spc + 2, flags, false);
pp_string (buffer, " = VDEF <");
n = VDEF_NUM (vdefs);
for (i = 0; i < n; i++)
{
dump_generic_node (buffer, VDEF_OP (vdefs, i), spc + 2, flags, 0);
if (i < n - 1)
pp_string (buffer, ", ");
}
dump_generic_node (buffer, vuse, spc + 2, flags, false);
pp_character (buffer, '>');
newline_and_indent (buffer, spc);
}
else if (vuse != NULL_TREE)
{
pp_string (buffer, "# VUSE <");
dump_generic_node (buffer, vuse, spc + 2, flags, false);
pp_character (buffer, '>');
if ((flags & TDF_MEMSYMS) && vdefs->next == NULL)
dump_symbols (buffer, gimple_stored_syms (gs), flags);
newline_and_indent (buffer, spc);
vdefs = vdefs->next;
}
}
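/* Editor's note: with the single virtual operand scheme the dumper
   above emits output of the form (a sketch; SSA version numbers will
   vary):

     # .MEM_3 = VDEF <.MEM_2(D)>
     i = 1;

     # VUSE <.MEM_3>
     return i;  */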


@ -1902,7 +1902,7 @@ gimple_set_bb (gimple stmt, basic_block bb)
LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
if (old_len <= (unsigned) uid)
{
unsigned new_len = 3 * uid / 2;
unsigned new_len = 3 * uid / 2 + 1;
VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
new_len);
@ -2209,13 +2209,12 @@ gimple_copy (gimple stmt)
if (gimple_has_mem_ops (stmt))
{
gimple_set_vdef_ops (copy, NULL);
gimple_set_vuse_ops (copy, NULL);
copy->gsmem.membase.stores = NULL;
copy->gsmem.membase.loads = NULL;
gimple_set_vdef (copy, gimple_vdef (stmt));
gimple_set_vuse (copy, gimple_vuse (stmt));
}
update_stmt (copy);
/* SSA operands need to be updated. */
gimple_set_modified (copy, true);
}
return copy;
@ -2456,46 +2455,6 @@ dump_gimple_statistics (void)
}
/* Deep copy SYMS into the set of symbols stored by STMT. If SYMS is
NULL or empty, the storage used is freed up. */
void
gimple_set_stored_syms (gimple stmt, bitmap syms, bitmap_obstack *obs)
{
gcc_assert (gimple_has_mem_ops (stmt));
if (syms == NULL || bitmap_empty_p (syms))
BITMAP_FREE (stmt->gsmem.membase.stores);
else
{
if (stmt->gsmem.membase.stores == NULL)
stmt->gsmem.membase.stores = BITMAP_ALLOC (obs);
bitmap_copy (stmt->gsmem.membase.stores, syms);
}
}
/* Deep copy SYMS into the set of symbols loaded by STMT. If SYMS is
NULL or empty, the storage used is freed up. */
void
gimple_set_loaded_syms (gimple stmt, bitmap syms, bitmap_obstack *obs)
{
gcc_assert (gimple_has_mem_ops (stmt));
if (syms == NULL || bitmap_empty_p (syms))
BITMAP_FREE (stmt->gsmem.membase.loads);
else
{
if (stmt->gsmem.membase.loads == NULL)
stmt->gsmem.membase.loads = BITMAP_ALLOC (obs);
bitmap_copy (stmt->gsmem.membase.loads, syms);
}
}
/* Return the number of operands needed on the RHS of a GIMPLE
assignment for an expression with tree code CODE. */
@ -2866,9 +2825,6 @@ is_gimple_reg (tree t)
if (TREE_CODE (t) == SSA_NAME)
t = SSA_NAME_VAR (t);
if (MTAG_P (t))
return false;
if (!is_gimple_variable (t))
return false;
@ -3127,6 +3083,9 @@ gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
if (gimple_call_lhs (stmt))
gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
gimple_set_vuse (new_stmt, gimple_vuse (stmt));
gimple_set_vdef (new_stmt, gimple_vdef (stmt));
gimple_set_block (new_stmt, gimple_block (stmt));
if (gimple_has_location (stmt))
gimple_set_location (new_stmt, gimple_location (stmt));
@ -3138,7 +3097,101 @@ gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
gimple_call_set_return_slot_opt (new_stmt, gimple_call_return_slot_opt_p (stmt));
gimple_call_set_from_thunk (new_stmt, gimple_call_from_thunk_p (stmt));
gimple_call_set_va_arg_pack (new_stmt, gimple_call_va_arg_pack_p (stmt));
gimple_set_modified (new_stmt, true);
return new_stmt;
}
/* Data structure used to count the number of dereferences to PTR
inside an expression. */
struct count_ptr_d
{
tree ptr;
unsigned num_stores;
unsigned num_loads;
};
/* Helper for count_uses_and_derefs. Called by walk_tree to look for
(ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
/* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
pointer 'ptr' is *not* dereferenced, it is simply used to compute
the address of 'fld' as 'ptr + offsetof(fld)'. */
if (TREE_CODE (*tp) == ADDR_EXPR)
{
*walk_subtrees = 0;
return NULL_TREE;
}
if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
{
if (wi_p->is_lhs)
count_p->num_stores++;
else
count_p->num_loads++;
}
return NULL_TREE;
}
/* Count the number of direct and indirect uses for pointer PTR in
statement STMT. The number of direct uses is stored in
*NUM_USES_P. Indirect references are counted separately depending
on whether they are store or load operations. The counts are
stored in *NUM_STORES_P and *NUM_LOADS_P. */
void
count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
unsigned *num_loads_p, unsigned *num_stores_p)
{
ssa_op_iter i;
tree use;
*num_uses_p = 0;
*num_loads_p = 0;
*num_stores_p = 0;
/* Find out the total number of uses of PTR in STMT. */
FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
if (use == ptr)
(*num_uses_p)++;
/* Now count the number of indirect references to PTR. This is
truly awful, but we don't have much choice. There are no parent
pointers inside INDIRECT_REFs, so an expression like
'*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
find all the indirect and direct uses of x_1 inside. The only
shortcut we can take is the fact that GIMPLE only allows
INDIRECT_REFs inside the expressions below. */
if (is_gimple_assign (stmt)
|| gimple_code (stmt) == GIMPLE_RETURN
|| gimple_code (stmt) == GIMPLE_ASM
|| is_gimple_call (stmt))
{
struct walk_stmt_info wi;
struct count_ptr_d count;
count.ptr = ptr;
count.num_stores = 0;
count.num_loads = 0;
memset (&wi, 0, sizeof (wi));
wi.info = &count;
walk_gimple_op (stmt, count_ptr_derefs, &wi);
*num_stores_p = count.num_stores;
*num_loads_p = count.num_loads;
}
gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
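/* Editor's note: a usage sketch for the function above, not part of
   the patch.  For a pointer SSA name PTR and a statement STMT:

     unsigned num_uses, num_loads, num_stores;
     count_uses_and_derefs (ptr, stmt, &num_uses, &num_loads, &num_stores);

   NUM_USES counts direct appearances of PTR; NUM_LOADS and NUM_STORES
   count dereferences on the RHS and LHS respectively.  */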
#include "gt-gimple.h"


@ -34,6 +34,10 @@ DEF_VEC_P(gimple);
DEF_VEC_ALLOC_P(gimple,heap);
DEF_VEC_ALLOC_P(gimple,gc);
typedef gimple *gimple_p;
DEF_VEC_P(gimple_p);
DEF_VEC_ALLOC_P(gimple_p,heap);
DEF_VEC_P(gimple_seq);
DEF_VEC_ALLOC_P(gimple_seq,gc);
DEF_VEC_ALLOC_P(gimple_seq,heap);
@ -288,8 +292,8 @@ struct gimple_statement_base GTY(())
/* Nonzero if this statement contains volatile operands. */
unsigned has_volatile_ops : 1;
/* Nonzero if this statement contains memory references. */
unsigned references_memory_p : 1;
/* Padding to get subcode to 16 bit alignment. */
unsigned pad : 1;
/* The SUBCODE field can be used for tuple-specific flags for tuples
that do not require subcodes. Note that SUBCODE should be at
@ -363,15 +367,11 @@ struct gimple_statement_with_memory_ops_base GTY(())
/* [ WORD 1-7 ] */
struct gimple_statement_with_ops_base opbase;
/* [ WORD 8-9 ]
Vectors for virtual operands. */
struct voptype_d GTY((skip (""))) *vdef_ops;
struct voptype_d GTY((skip (""))) *vuse_ops;
/* [ WORD 9-10 ]
Symbols stored/loaded by this statement. */
bitmap GTY((skip (""))) stores;
bitmap GTY((skip (""))) loads;
/* [ WORD 8-9 ]
Virtual operands for this statement. The GC will pick them
up via the ssa_names array. */
tree GTY((skip (""))) vdef;
tree GTY((skip (""))) vuse;
};
@ -379,10 +379,10 @@ struct gimple_statement_with_memory_ops_base GTY(())
struct gimple_statement_with_memory_ops GTY(())
{
/* [ WORD 1-10 ] */
/* [ WORD 1-9 ] */
struct gimple_statement_with_memory_ops_base membase;
/* [ WORD 11 ]
/* [ WORD 10 ]
Operand vector. NOTE! This must always be the last field
of this structure. In particular, this means that this
structure cannot be embedded inside another one. */
@ -545,20 +545,20 @@ struct gimple_statement_wce GTY(())
struct gimple_statement_asm GTY(())
{
/* [ WORD 1-10 ] */
/* [ WORD 1-9 ] */
struct gimple_statement_with_memory_ops_base membase;
/* [ WORD 11 ]
/* [ WORD 10 ]
__asm__ statement. */
const char *string;
/* [ WORD 12 ]
/* [ WORD 11 ]
Number of inputs, outputs and clobbers. */
unsigned char ni;
unsigned char no;
unsigned short nc;
/* [ WORD 13 ]
/* [ WORD 12 ]
Operand vector. NOTE! This must always be the last field
of this structure. In particular, this means that this
structure cannot be embedded inside another one. */
@ -907,6 +907,8 @@ extern bool is_gimple_call_addr (tree);
extern tree get_call_expr_in (tree t);
extern void recalculate_side_effects (tree);
extern void count_uses_and_derefs (tree, gimple, unsigned *, unsigned *,
unsigned *);
/* In gimplify.c */
extern tree create_tmp_var_raw (tree, const char *);
@ -1010,9 +1012,6 @@ extern tree gimple_assign_rhs_to_tree (gimple);
/* In builtins.c */
extern bool validate_gimple_arglist (const_gimple, ...);
/* In tree-ssa-operands.c */
extern void gimple_add_to_addresses_taken (gimple, tree);
/* In tree-ssa.c */
extern bool tree_ssa_useless_type_conversion (tree);
extern bool useless_type_conversion_p (tree, tree);
@ -1314,69 +1313,93 @@ gimple_set_use_ops (gimple g, struct use_optype_d *use)
}
/* Return the set of VUSE operands for statement G. */
/* Return the set of VUSE operand for statement G. */
static inline struct voptype_d *
gimple_vuse_ops (const_gimple g)
static inline use_operand_p
gimple_vuse_op (const_gimple g)
{
struct use_optype_d *ops;
if (!gimple_has_mem_ops (g))
return NULL;
return g->gsmem.membase.vuse_ops;
return NULL_USE_OPERAND_P;
ops = g->gsops.opbase.use_ops;
if (ops
&& USE_OP_PTR (ops)->use == &g->gsmem.membase.vuse)
return USE_OP_PTR (ops);
return NULL_USE_OPERAND_P;
}
/* Return the set of VDEF operand for statement G. */
static inline def_operand_p
gimple_vdef_op (const_gimple g)
{
struct def_optype_d *ops;
if (!gimple_has_mem_ops (g))
return NULL_DEF_OPERAND_P;
ops = g->gsops.opbase.def_ops;
if (ops
&& DEF_OP_PTR (ops) == &g->gsmem.membase.vdef)
return DEF_OP_PTR (ops);
return NULL_DEF_OPERAND_P;
}
/* Set OPS to be the set of VUSE operands for statement G. */
/* Return the single VUSE operand of the statement G. */
static inline tree
gimple_vuse (const_gimple g)
{
if (!gimple_has_mem_ops (g))
return NULL_TREE;
return g->gsmem.membase.vuse;
}
/* Return the single VDEF operand of the statement G. */
static inline tree
gimple_vdef (const_gimple g)
{
if (!gimple_has_mem_ops (g))
return NULL_TREE;
return g->gsmem.membase.vdef;
}
/* Return the single VUSE operand of the statement G. */
static inline tree *
gimple_vuse_ptr (gimple g)
{
if (!gimple_has_mem_ops (g))
return NULL;
return &g->gsmem.membase.vuse;
}
/* Return the single VDEF operand of the statement G. */
static inline tree *
gimple_vdef_ptr (gimple g)
{
if (!gimple_has_mem_ops (g))
return NULL;
return &g->gsmem.membase.vdef;
}
/* Set the single VUSE operand of the statement G. */
static inline void
gimple_set_vuse_ops (gimple g, struct voptype_d *ops)
gimple_set_vuse (gimple g, tree vuse)
{
gcc_assert (gimple_has_mem_ops (g));
g->gsmem.membase.vuse_ops = ops;
g->gsmem.membase.vuse = vuse;
}
/* Return the set of VDEF operands for statement G. */
static inline struct voptype_d *
gimple_vdef_ops (const_gimple g)
{
if (!gimple_has_mem_ops (g))
return NULL;
return g->gsmem.membase.vdef_ops;
}
/* Set OPS to be the set of VDEF operands for statement G. */
/* Set the single VDEF operand of the statement G. */
static inline void
gimple_set_vdef_ops (gimple g, struct voptype_d *ops)
gimple_set_vdef (gimple g, tree vdef)
{
gcc_assert (gimple_has_mem_ops (g));
g->gsmem.membase.vdef_ops = ops;
}
/* Return the set of symbols loaded by statement G. Each element of the
set is the DECL_UID of the corresponding symbol. */
static inline bitmap
gimple_loaded_syms (const_gimple g)
{
if (!gimple_has_mem_ops (g))
return NULL;
return g->gsmem.membase.loads;
}
/* Return the set of symbols stored by statement G. Each element of
the set is the DECL_UID of the corresponding symbol. */
static inline bitmap
gimple_stored_syms (const_gimple g)
{
if (!gimple_has_mem_ops (g))
return NULL;
return g->gsmem.membase.stores;
g->gsmem.membase.vdef = vdef;
}
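/* Editor's note: a hedged usage sketch, not part of the patch.  With
   the accessors above, copying the virtual operands of STMT to COPY
   reduces to two moves (compare the gimple_copy change earlier); the
   function name is hypothetical.  */
static inline void
copy_virtual_operands_sketch (gimple copy, gimple stmt)
{
  if (gimple_has_mem_ops (stmt))
    {
      gimple_set_vuse (copy, gimple_vuse (stmt));  /* Incoming memory state.  */
      gimple_set_vdef (copy, gimple_vdef (stmt));  /* Outgoing memory state.  */
    }
}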
@ -1486,19 +1509,10 @@ gimple_set_has_volatile_ops (gimple stmt, bool volatilep)
static inline bool
gimple_references_memory_p (gimple stmt)
{
return gimple_has_mem_ops (stmt) && stmt->gsbase.references_memory_p;
return gimple_has_mem_ops (stmt) && gimple_vuse (stmt);
}
/* Set the REFERENCES_MEMORY_P flag for STMT to MEM_P. */
static inline void
gimple_set_references_memory (gimple stmt, bool mem_p)
{
if (gimple_has_mem_ops (stmt))
stmt->gsbase.references_memory_p = (unsigned) mem_p;
}
/* Return the subcode for OMP statement S. */
static inline unsigned


@ -4119,7 +4119,7 @@ rename_variables_in_stmt (gimple stmt, htab_t map)
ssa_op_iter iter;
use_operand_p use_p;
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
{
tree use = USE_FROM_PTR (use_p);
tree new_name = get_new_name_from_old_name (map, use);
@ -4238,8 +4238,6 @@ expand_scalar_variables_expr (tree type, tree op0, enum tree_code code,
tree new_name = force_gimple_operand_gsi (gsi, expr, true, NULL,
true, GSI_SAME_STMT);
set_symbol_mem_tag (SSA_NAME_VAR (new_name),
symbol_mem_tag (SSA_NAME_VAR (old_name)));
return fold_build1 (code, type, new_name);
}
@ -4479,7 +4477,7 @@ graphite_copy_stmts_from_block (basic_block bb, basic_block new_bb, htab_t map)
operands. */
copy = gimple_copy (stmt);
gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
mark_symbols_for_renaming (copy);
mark_sym_for_renaming (gimple_vop (cfun));
region = lookup_stmt_eh_region (stmt);
if (region >= 0)
@ -4488,7 +4486,7 @@ graphite_copy_stmts_from_block (basic_block bb, basic_block new_bb, htab_t map)
/* Create new names for all the definitions created by COPY and
add replacement mappings for each new name. */
FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_DEF)
FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
{
tree old_name = DEF_FROM_PTR (def_p);
tree new_name = create_new_def_for (old_name, copy, def_p);
@ -4708,8 +4706,8 @@ translate_clast (scop_p scop, struct loop *context_loop,
next_e, map);
htab_delete (map);
loop_iv_stack_remove_constants (ivstack);
update_ssa (TODO_update_ssa);
recompute_all_dominators ();
update_ssa (TODO_update_ssa);
graphite_verify ();
return translate_clast (scop, context_loop, stmt->next, next_e, ivstack);
}


@ -969,6 +969,8 @@ ipcp_update_callgraph (void)
new_stmt = gimple_call_copy_skip_args (cs->call_stmt,
args_to_skip);
if (gimple_vdef (new_stmt))
SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
gsi = gsi_for_stmt (cs->call_stmt);
gsi_replace (&gsi, new_stmt, true);
cgraph_set_call_stmt (cs, new_stmt);


@ -140,8 +140,6 @@ static inline void
check_decl (funct_state local,
tree t, bool checking_write)
{
if (MTAG_P (t))
return;
/* Do not want to do anything with volatile except mark any
function that uses one to be not const or pure. */
if (TREE_THIS_VOLATILE (t))
@ -377,26 +375,59 @@ check_call (funct_state local, gimple call, bool ipa)
/* Direct functions calls are handled by IPA propagation. */
}
/* Look into pointer pointed to by GSIP and figure out what interesting side effects
it have. */
/* Look into pointer pointed to by GSIP and figure out what interesting side
effects it has. */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
gimple stmt = gsi_stmt (*gsip);
unsigned int i = 0;
bitmap_iterator bi;
if (dump_file)
{
fprintf (dump_file, " scanning: ");
print_gimple_stmt (dump_file, stmt, 0, 0);
}
if (gimple_loaded_syms (stmt))
EXECUTE_IF_SET_IN_BITMAP (gimple_loaded_syms (stmt), 0, i, bi)
check_decl (local, referenced_var_lookup (i), false);
if (gimple_stored_syms (stmt))
EXECUTE_IF_SET_IN_BITMAP (gimple_stored_syms (stmt), 0, i, bi)
check_decl (local, referenced_var_lookup (i), true);
/* Look for direct loads and stores. */
if (gimple_has_lhs (stmt))
{
tree lhs = get_base_address (gimple_get_lhs (stmt));
if (lhs && DECL_P (lhs))
check_decl (local, lhs, true);
}
if (gimple_assign_single_p (stmt))
{
tree rhs = get_base_address (gimple_assign_rhs1 (stmt));
if (rhs && DECL_P (rhs))
check_decl (local, rhs, false);
}
else if (is_gimple_call (stmt))
{
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree rhs = get_base_address (gimple_call_arg (stmt, i));
if (rhs && DECL_P (rhs))
check_decl (local, rhs, false);
}
}
else if (gimple_code (stmt) == GIMPLE_ASM)
{
for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
{
tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
op = get_base_address (op);
if (op && DECL_P (op))
check_decl (local, op, false);
}
for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
{
tree op = TREE_VALUE (gimple_asm_output_op (stmt, i));
op = get_base_address (op);
if (op && DECL_P (op))
check_decl (local, op, true);
}
}
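/* Editor's note: the loaded/stored symbol bitmaps are gone; direct
   accesses are now discovered syntactically.  For an assignment
   "x.f = y.g", get_base_address yields X for the LHS and Y for the
   RHS, which the code above records as a store and a load
   respectively (illustration only, not part of the patch).  */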
if (gimple_code (stmt) != GIMPLE_CALL
&& stmt_could_throw_p (stmt))


@ -433,33 +433,51 @@ scan_stmt_for_static_refs (gimple_stmt_iterator *gsip,
if (fn)
local = get_reference_vars_info (fn)->local;
if (gimple_loaded_syms (stmt))
EXECUTE_IF_SET_IN_BITMAP (gimple_loaded_syms (stmt), 0, i, bi)
mark_load (local, referenced_var_lookup (i));
if (gimple_stored_syms (stmt))
EXECUTE_IF_SET_IN_BITMAP (gimple_stored_syms (stmt), 0, i, bi)
mark_store (local, referenced_var_lookup (i));
/* Look for direct loads and stores. */
if (gimple_has_lhs (stmt))
{
tree lhs = get_base_address (gimple_get_lhs (stmt));
if (lhs && DECL_P (lhs))
mark_store (local, lhs);
}
if (gimple_assign_single_p (stmt))
{
tree rhs = get_base_address (gimple_assign_rhs1 (stmt));
if (rhs && DECL_P (rhs))
mark_load (local, rhs);
}
else if (is_gimple_call (stmt))
{
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree rhs = get_base_address (gimple_call_arg (stmt, i));
if (rhs && DECL_P (rhs))
mark_load (local, rhs);
}
check_call (local, stmt);
}
else if (gimple_code (stmt) == GIMPLE_ASM)
{
for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
{
tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
op = get_base_address (op);
if (op && DECL_P (op))
mark_load (local, op);
}
for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
{
tree op = TREE_VALUE (gimple_asm_output_op (stmt, i));
op = get_base_address (op);
if (op && DECL_P (op))
mark_store (local, op);
}
check_asm_memory_clobber (local, stmt);
}
if (gimple_addresses_taken (stmt))
EXECUTE_IF_SET_IN_BITMAP (gimple_addresses_taken (stmt), 0, i, bi)
mark_address_taken (referenced_var_lookup (i));
switch (gimple_code (stmt))
{
case GIMPLE_CALL:
check_call (local, stmt);
break;
case GIMPLE_ASM:
check_asm_memory_clobber (local, stmt);
break;
/* We used to check nonlocal labels here and set them as potentially modifying
everything. This is not needed, since we can get to nonlocal label only
from callee and thus we will get info propagated. */
default:
break;
}
return NULL;
}


@ -495,8 +495,6 @@ static void
finalize_var_creation (tree new_decl)
{
add_referenced_var (new_decl);
if (is_global_var (new_decl))
mark_call_clobbered (new_decl, ESCAPE_UNKNOWN);
mark_sym_for_renaming (new_decl);
}
@ -1249,6 +1247,13 @@ create_general_new_stmt (struct access_site *acc, tree new_type)
gimple new_stmt = gimple_copy (old_stmt);
unsigned i;
/* We are really building a new stmt, clear the virtual operands. */
if (gimple_has_mem_ops (new_stmt))
{
gimple_set_vuse (new_stmt, NULL_TREE);
gimple_set_vdef (new_stmt, NULL_TREE);
}
for (i = 0; VEC_iterate (tree, acc->vars, i, var); i++)
{
tree *pos;


@ -2142,7 +2142,7 @@ can_put_in_inner_loop (struct loop *inner, gimple stmt)
use_operand_p use_p;
gcc_assert (is_gimple_assign (stmt));
if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)
if (gimple_vuse (stmt)
|| !stmt_invariant_in_loop_p (inner, stmt))
return false;
@ -2167,7 +2167,7 @@ can_put_after_inner_loop (struct loop *loop, gimple stmt)
imm_use_iterator imm_iter;
use_operand_p use_p;
if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
if (gimple_vuse (stmt))
return false;
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, gimple_assign_lhs (stmt))
@ -2536,8 +2536,6 @@ perfect_nestify (struct loop *loop,
incremented when we do. */
for (bsi = gsi_start_bb (bbs[i]); !gsi_end_p (bsi);)
{
ssa_op_iter i;
tree n;
gimple stmt = gsi_stmt (bsi);
if (stmt == exit_condition
@ -2553,12 +2551,12 @@ perfect_nestify (struct loop *loop,
VEC_index (tree, lbounds, 0), replacements, &firstbsi);
gsi_move_before (&bsi, &tobsi);
/* If the statement has any virtual operands, they may
need to be rewired because the original loop may
still reference them. */
FOR_EACH_SSA_TREE_OPERAND (n, stmt, i, SSA_OP_ALL_VIRTUALS)
mark_sym_for_renaming (SSA_NAME_VAR (n));
if (gimple_vuse (stmt))
mark_sym_for_renaming (gimple_vop (cfun));
}
}


@ -3495,6 +3495,8 @@ expand_omp_taskreg (struct omp_region *region)
if (changed)
cleanup_tree_cfg ();
}
if (gimple_in_ssa_p (cfun))
update_ssa (TODO_update_ssa);
current_function_decl = save_current;
pop_cfun ();
}


@ -777,8 +777,6 @@ void
decode_options (unsigned int argc, const char **argv)
{
static bool first_time_p = true;
static int initial_max_aliased_vops;
static int initial_avg_aliased_vops;
static int initial_min_crossjump_insns;
static int initial_max_fields_for_field_sensitive;
static int initial_loop_invariant_max_bbs_in_loop;
@ -798,8 +796,6 @@ decode_options (unsigned int argc, const char **argv)
lang_hooks.initialize_diagnostics (global_dc);
/* Save initial values of parameters we reset. */
initial_max_aliased_vops = MAX_ALIASED_VOPS;
initial_avg_aliased_vops = AVG_ALIASED_VOPS;
initial_min_crossjump_insns
= compiler_params[PARAM_MIN_CROSSJUMP_INSNS].value;
initial_max_fields_for_field_sensitive
@ -907,11 +903,6 @@ decode_options (unsigned int argc, const char **argv)
flag_tree_switch_conversion = 1;
flag_ipa_cp = opt2;
/* Allow more virtual operators to increase alias precision. */
set_param_value ("max-aliased-vops",
(opt2) ? 500 : initial_max_aliased_vops);
/* Track fields in field-sensitive alias analysis. */
set_param_value ("max-fields-for-field-sensitive",
(opt2) ? 100 : initial_max_fields_for_field_sensitive);
@ -931,13 +922,6 @@ decode_options (unsigned int argc, const char **argv)
if (flag_ipa_cp_clone)
flag_ipa_cp = 1;
/* Allow even more virtual operators. Max-aliased-vops was set above for
-O2, so don't reset it unless we are at -O3. */
if (opt3)
set_param_value ("max-aliased-vops", 1000);
set_param_value ("avg-aliased-vops", (opt3) ? 3 : initial_avg_aliased_vops);
/* Just -O1/-O0 optimizations. */
opt1_max = (optimize <= 1);
align_loops = opt1_max;


@ -537,16 +537,6 @@ DEFPARAM(PARAM_MAX_RELOAD_SEARCH_INSNS,
"The maximum number of instructions to search backward when looking for equivalent reload",
100, 0, 0)
DEFPARAM(PARAM_MAX_ALIASED_VOPS,
"max-aliased-vops",
"The maximum number of virtual operators that a function is allowed to have before triggering memory partitioning heuristics",
100, 0, 0)
DEFPARAM(PARAM_AVG_ALIASED_VOPS,
"avg-aliased-vops",
"The average number of virtual operators that memory statements are allowed to have before triggering memory partitioning heuristics",
1, 0, 0)
DEFPARAM(PARAM_MAX_SCHED_REGION_BLOCKS,
"max-sched-region-blocks",
"The maximum number of blocks in a region to be considered for interblock scheduling",


@ -140,10 +140,6 @@ typedef enum compiler_param
PARAM_VALUE (PARAM_SMS_DFA_HISTORY)
#define SMS_LOOP_AVERAGE_COUNT_THRESHOLD \
PARAM_VALUE (PARAM_SMS_LOOP_AVERAGE_COUNT_THRESHOLD)
#define MAX_ALIASED_VOPS \
PARAM_VALUE (PARAM_MAX_ALIASED_VOPS)
#define AVG_ALIASED_VOPS \
PARAM_VALUE (PARAM_AVG_ALIASED_VOPS)
#define INTEGER_SHARE_LIMIT \
PARAM_VALUE (PARAM_INTEGER_SHARE_LIMIT)
#define MAX_LAST_VALUE_RTL \


@ -543,7 +543,6 @@ init_optimization_passes (void)
NEXT_PASS (pass_expand_omp);
NEXT_PASS (pass_referenced_vars);
NEXT_PASS (pass_reset_cc_flags);
NEXT_PASS (pass_build_ssa);
NEXT_PASS (pass_early_warn_uninitialized);
NEXT_PASS (pass_all_early_optimizations);
@ -560,7 +559,6 @@ init_optimization_passes (void)
NEXT_PASS (pass_copy_prop);
NEXT_PASS (pass_merge_phi);
NEXT_PASS (pass_cd_dce);
NEXT_PASS (pass_simple_dse);
NEXT_PASS (pass_tail_recursion);
NEXT_PASS (pass_convert_switch);
NEXT_PASS (pass_cleanup_eh);
@ -937,7 +935,7 @@ execute_function_todo (void *data)
SSA form to become out-of-date (see PR 22037). So, even
if the parent pass had not scheduled an SSA update, we may
still need to do one. */
if (!(flags & TODO_update_ssa_any) && need_ssa_update_p ())
if (!(flags & TODO_update_ssa_any) && need_ssa_update_p (cfun))
flags |= TODO_update_ssa;
}
@ -948,8 +946,13 @@ execute_function_todo (void *data)
cfun->last_verified &= ~TODO_verify_ssa;
}
if (flags & TODO_update_address_taken)
execute_update_addresses_taken (true);
if (flags & TODO_rebuild_alias)
{
if (!(flags & TODO_update_address_taken))
execute_update_addresses_taken (true);
compute_may_aliases ();
cfun->curr_properties |= PROP_alias;
}
@ -1021,7 +1024,8 @@ static void
execute_todo (unsigned int flags)
{
#if defined ENABLE_CHECKING
if (need_ssa_update_p ())
if (cfun
&& need_ssa_update_p (cfun))
gcc_assert (flags & TODO_update_ssa_any);
#endif
@ -1265,6 +1269,8 @@ execute_one_pass (struct opt_pass *pass)
This is a hack until the new folder is ready. */
in_gimple_form = (cfun && (cfun->curr_properties & PROP_trees)) != 0;
initializing_dump = pass_init_dump_file (pass);
/* Run pre-pass verification. */
execute_todo (pass->todo_flags_start);
@ -1273,8 +1279,6 @@ execute_one_pass (struct opt_pass *pass)
(void *)(size_t)pass->properties_required);
#endif
initializing_dump = pass_init_dump_file (pass);
/* If a timevar is present, start it. */
if (pass->tv_id)
timevar_push (pass->tv_id);


@ -1,3 +1,62 @@
2009-04-03 Richard Guenther <rguenther@suse.de>
PR middle-end/13146
PR tree-optimization/23940
PR tree-optimization/33237
PR middle-end/33974
PR middle-end/34093
PR tree-optimization/36201
PR tree-optimization/36230
PR tree-optimization/38049
PR tree-optimization/38207
PR tree-optimization/38230
PR tree-optimization/38301
PR tree-optimization/38585
PR middle-end/38895
PR tree-optimization/38985
PR tree-optimization/39299
* gcc.dg/pr19633-1.c: Adjust.
* gcc.dg/torture/pta-callused-1.c: Likewise.
* gcc.dg/torture/pr39074-2.c: Likewise.
* gcc.dg/torture/pr39074.c: Likewise.
* gcc.dg/torture/pta-ptrarith-3.c: New testcase.
* gcc.dg/torture/pr30375.c: Adjust.
* gcc.dg/torture/pr33563.c: Likewise.
* gcc.dg/torture/pr33870.c: Likewise.
* gcc.dg/torture/pr33560.c: Likewise.
* gcc.dg/torture/pta-structcopy-1.c: New testcase.
* gcc.dg/torture/ssa-pta-fn-1.c: Likewise.
* gcc.dg/tree-ssa/alias-15.c: Remove.
* gcc.dg/tree-ssa/ssa-dce-4.c: New testcase.
* gcc.dg/tree-ssa/pr26421.c: Adjust.
* gcc.dg/tree-ssa/ssa-fre-10.c: XFAIL.
* gcc.dg/tree-ssa/ssa-dce-5.c: New testcase.
* gcc.dg/tree-ssa/pr23382.c: Adjust.
* gcc.dg/tree-ssa/ssa-fre-20.c: New testcase.
* gcc.dg/tree-ssa/alias-16.c: Adjust.
* gcc.dg/tree-ssa/ssa-fre-13.c: Likewise.
* gcc.dg/tree-ssa/ssa-fre-14.c: Likewise.
* gcc.dg/tree-ssa/alias-18.c: Likewise.
* gcc.dg/tree-ssa/ssa-fre-15.c: Likewise.
* gcc.dg/tree-ssa/ssa-lim-3.c: Likewise.
* gcc.dg/tree-ssa/alias-19.c: Likewise.
* gcc.dg/tree-ssa/pta-ptrarith-1.c: New testcase.
* gcc.dg/tree-ssa/pr13146.c: Likewise.
* gcc.dg/tree-ssa/ssa-pre-23.c: Likewise.
* gcc.dg/tree-ssa/pta-ptrarith-2.c: Likewise.
* gcc.dg/tree-ssa/ssa-fre-18.c: Likewise.
* gcc.dg/tree-ssa/ssa-pre-24.c: New XFAILed testcase.
* gcc.dg/tree-ssa/ssa-fre-19.c: New testcase.
* gcc.dg/tree-ssa/alias-20.c: Likewise.
* gcc.dg/tree-ssa/ssa-dse-12.c: Likewise.
* gcc.dg/tree-ssa/pr38895.c: Likewise.
* gcc.dg/uninit-B.c: XFAIL.
* gcc.dg/vect/no-vfa-vect-43.c: Adjust.
* gcc.dg/uninit-pr19430.c: XFAIL.
* g++.dg/tree-ssa/pr13146.C: New testcase.
* g++.dg/opt/pr36187.C: Adjust.
* g++.dg/torture/20090329-1.C: New testcase.
2009-04-02 Chao-ying Fu <fu@mips.com>
* gcc.target/mips/interrupt_handler.c: New test.


@ -1,5 +1,5 @@
/* { dg-do run } */
/* { dg-options "-O2 --param max-aliased-vops=20" } */
/* { dg-options "-O2" } */
extern "C" void abort (void);
enum SbxDataType { SbxINTEGER, SbxDECIMAL, SbxBYREF = 0x4000 };


@ -0,0 +1,59 @@
/* { dg-do compile } */
struct input_iterator_tag { };
template<typename _Category, typename _Tp, typename _Distance = long, typename _Pointer = _Tp*, typename _Reference = _Tp&>
struct iterator {
typedef _Category iterator_category;
};
template<typename _Iterator> struct iterator_traits {
typedef typename _Iterator::iterator_category iterator_category;
};
template<typename, typename> struct __lc_rai {
template<typename _II1, typename _II2>
static _II1 __newlast1(_II1, _II1 __last1, _II2, _II2) {
return __last1;
}
template<typename _II>
static bool __cnd2(_II __first, _II __last) {
return __first != __last;
}
};
template<typename _II1, typename _II2, typename _Compare>
bool lexicographical_compare(_II1 __first1, _II1 __last1, _II2 __first2,
_II2 __last2, _Compare __comp) {
typedef typename iterator_traits<_II1>::iterator_category _Category1;
typedef typename iterator_traits<_II2>::iterator_category _Category2;
typedef __lc_rai<_Category1, _Category2> __rai_type;
__last1 = __rai_type::__newlast1(__first1, __last1, __first2, __last2);
for (;
__first1 != __last1 && __rai_type::__cnd2(__first2, __last2);
++__first1, ++__first2) {
if (__comp(*__first1, *__first2)) return true;
}
}
void __assert_fail () throw () __attribute__ ((__noreturn__));
template<typename T> struct BoundsContainer { };
template<class T> class input_iterator_wrapper : public iterator<input_iterator_tag, T, long, T*, T&> {
public:
typedef BoundsContainer<T> ContainerType;
T* ptr;
ContainerType* SharedInfo;
input_iterator_wrapper(const input_iterator_wrapper& in) : ptr(in.ptr), SharedInfo(in.SharedInfo) { }
bool operator==(const input_iterator_wrapper& in) const {
(static_cast<void> ((SharedInfo != __null
&& SharedInfo == in.SharedInfo)
? 0 : (__assert_fail (), 0)));
}
bool operator!=(const input_iterator_wrapper& in) const {
return !(*this == in);
}
T& operator*() const { }
input_iterator_wrapper& operator++() { }
};
struct X { };
bool predicate(const X&, const X&) {
return true;
}
bool test2(input_iterator_wrapper<X>& x) {
return lexicographical_compare(x, x, x, x, predicate);
}


@ -0,0 +1,74 @@
/* { dg-do link } */
/* { dg-options "-O -fstrict-aliasing" } */
class first
{
public:
double d;
int f1;
};
class middle : public first
{
};
class second : public middle
{
public:
int f2;
short a;
};
class third
{
public:
char a;
char b;
};
class multi: public third, public second
{
public:
short s;
char f3;
};
extern void link_error ();
void
foo (first *s1, second *s2)
{
s1->f1 = 0;
s2->f2 = 0;
s1->f1++;
s2->f2++;
s1->f1++;
s2->f2++;
if (s1->f1 != 2)
link_error ();
}
void
bar (first *s1, multi *s3)
{
s1->f1 = 0;
s3->f3 = 0;
s1->f1++;
s3->f3++;
s1->f1++;
s3->f3++;
if (s1->f1 != 2)
link_error ();
}
int
main()
{
first a;
second b;
multi c;
foo (&a, &b);
bar (&a, &c);
return 0;
}


@ -1,9 +1,5 @@
/* { dg-do run } */
/* The max-aliased-vops setting is a temporary workaround to avoid the
random failures as described in PR 30194. This test case does not
need alias sets bigger than 13 elements. */
/* { dg-options "-O2 --param max-aliased-vops=15" } */
/* { dg-options "-O2" } */
extern void abort (void);


@ -1,5 +1,4 @@
/* { dg-do run } */
/* { dg-options "--param max-aliased-vops=0" } */
typedef struct _s {
int a;


@ -1,5 +1,4 @@
/* { dg-do run } */
/* { dg-options "--param max-aliased-vops=0" } */
struct T
{


@ -1,5 +1,4 @@
/* { dg-do run } */
/* { dg-options "--param max-aliased-vops=0" } */
struct T
{


@ -1,5 +1,4 @@
/* { dg-do run } */
/* { dg-options "--param max-aliased-vops=1" } */
struct X {
int i;


@ -30,5 +30,5 @@ int main()
return 0;
}
/* { dg-final { scan-tree-dump "y.._., name memory tag: NMT..., is dereferenced, points-to vars: { i }" "alias" } } */
/* { dg-final { scan-tree-dump "y.._., points-to vars: { i }" "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */


@ -27,5 +27,5 @@ int main()
return 0;
}
/* { dg-final { scan-tree-dump "y.._., name memory tag: NMT..., is dereferenced, points-to vars: { i }" "alias" } } */
/* { dg-final { scan-tree-dump "y.._., points-to vars: { i }" "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */


@ -21,5 +21,5 @@ int main()
return 0;
}
/* { dg-final { scan-tree-dump "p.._., name memory tag: NMT..., is dereferenced, points-to vars: { i j }" "alias" } } */
/* { dg-final { scan-tree-dump "p.._., points-to vars: { i j }" "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */


@ -0,0 +1,37 @@
/* { dg-do run } */
/* { dg-options "-fdump-tree-alias" } */
/* { dg-skip-if "" { *-*-* } { "-O0" } { "" } } */
extern void abort (void);
struct X {
int *p;
int *q;
int *r;
};
int __attribute__((noinline))
foo(int i, int j, int k, int off)
{
struct X x;
int **p, *q;
x.p = &i;
x.q = &j;
x.r = &k;
p = &x.q;
p += off;
/* *p points to { i, j, k } */
q = *p;
return *q;
}
int main()
{
if (foo(1, 2, 3, -1) != 1)
abort ();
if (foo(1, 2, 3, 0) != 2)
abort ();
if (foo(1, 2, 3, 1) != 3)
abort ();
return 0;
}
/* { dg-final { scan-tree-dump "q_., points-to vars: { i j k }" "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */


@ -0,0 +1,34 @@
/* { dg-do run } */
/* { dg-options "-fno-tree-sra -fdump-tree-alias" } */
/* { dg-skip-if "" { *-*-* } { "-O0" } { "" } } */
struct X
{
long l1;
struct Y
{
long l2;
int *p;
} y;
};
int i;
static int
foo (struct X *x)
{
struct Y y = x->y;
*y.p = 0;
i = 1;
return *y.p;
}
extern void abort (void);
int main()
{
struct X x;
x.y.p = &i;
if (foo(&x) != 1)
abort ();
return 0;
}
/* { dg-final { scan-tree-dump "points-to vars: { i }" "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */


@ -0,0 +1,61 @@
/* { dg-do run } */
/* { dg-options "-fdump-tree-alias" } */
/* { dg-skip-if "" { *-*-* } { "-O0" } { "" } } */
extern void abort (void);
int *glob;
int * __attribute__((noinline,const))
foo_const(int *p) { return p; }
int * __attribute__((noinline,pure))
foo_pure(int *p) { return glob; }
int * __attribute__((noinline))
foo_normal(int *p) { glob = p; return p; }
void test_const(void)
{
int i;
int *p = &i;
int *q_const = foo_const(p);
*p = 1;
*q_const = 2;
if (*p != 2)
abort ();
}
void test(void)
{
int i;
int *p = &i;
int *q_normal = foo_normal(p);
*p = 1;
*q_normal = 2;
if (*p != 2)
abort ();
}
void test_pure(void)
{
int i;
int *p = &i;
int *q_pure = foo_pure(p);
*p = 1;
*q_pure = 2;
if (*p != 2)
abort ();
}
int main()
{
test_const();
test();
test_pure();
return 0;
}
/* { dg-final { scan-tree-dump "q_const_., points-to non-local, points-to vars: { i }" "alias" } } */
/* { dg-final { scan-tree-dump "q_pure_., points-to non-local, points-to escaped, points-to vars: { i }" "alias" } } */
/* { dg-final { scan-tree-dump "q_normal_., points-to non-local, points-to escaped, points-to vars: { }" "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */


@ -1,19 +0,0 @@
/* { dg-do compile } */
/* { dg-options "-O -fno-early-inlining -fdump-tree-alias-vops-details" } */
struct foo {
int a;
struct X {
int b[4];
} b;
} m;
static inline struct X *wrap(struct X *p) { return p; }
int test2(void)
{
struct X *p = wrap(&m.b);
/* Both memory references need to alias the same tags. */
return p->b[3] - m.b.b[3];
}
/* { dg-final { scan-tree-dump-times "VUSE <m_.\\\(D\\\)>" 2 "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */


@ -1,9 +1,5 @@
/* { dg-do run } */
/* { dg-options "-O --param max-aliased-vops=1" } */
/* Compile with -O --param max-aliased-vops=1. This partitions all
the initial SFTs for 'm' which was causing the operand scanner to
miss adding the right SFTs to p->b[2]. */
extern void abort (void);
struct X {


@ -1,5 +1,5 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-fre-details -fdump-tree-optimized --param max-aliased-vops=0" } */
/* { dg-options "-O2 -fdump-tree-fre-details -fdump-tree-optimized" } */
struct A {
int i;


@ -26,6 +26,5 @@ int main()
}
/* { dg-final { scan-tree-dump "q_. = { a b }" "alias" } } */
/* { dg-final { scan-tree-dump "q_., name memory tag: NMT..., is dereferenced, points-to vars: { a b }" "alias" } } */
/* { dg-final { scan-tree-dump "# VUSE <a_.\\\(D\\\), b_.>" "alias" } } */
/* { dg-final { scan-tree-dump "q_., points-to vars: { a b }" "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */


@ -0,0 +1,24 @@
/* { dg-do compile } */
/* { dg-options "-O -fstrict-aliasing -fdump-tree-optimized" } */
struct S { float f; int i; };
struct R { int x; int i; };
/* Strict-aliasing rules say that int and float do not alias. */
int bar(struct S *s, int *i)
{
*i = 0;
s->f = 1.0;
return *i;
}
/* Strict-aliasing rules say that S and R do not alias. */
int foo(struct S *s, struct R *r)
{
r->i = 0;
s->i = 1;
return r->i;
}
/* { dg-final { scan-tree-dump-times "return 0;" 2 "optimized" } } */
/* { dg-final { cleanup-tree-dump "optimized" } } */


@ -0,0 +1,22 @@
/* { dg-do compile } */
/* { dg-options "-O -fstrict-aliasing -fdump-tree-optimized" } */
struct A
{
int i;
};
struct B
{
struct A a;
int j;
};
int foo (struct A *p, struct B *q)
{
p->i = 0;
q->j = 1;
return p->i;
}
/* { dg-final { scan-tree-dump "return 0;" "optimized" } } */
/* { dg-final { cleanup-tree-dump "optimized" } } */


@ -1,5 +1,5 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-alias-vops" } */
/* { dg-options "-O2 -fdump-tree-pre-details" } */
struct a
{
int length;
@ -13,5 +13,5 @@ int f(void)
struct a *a = malloc(sizeof(struct a));
return a->length;
}
/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias"} } */
/* { dg-final { cleanup-tree-dump "alias" } } */
/* { dg-final { scan-tree-dump-times "Variable: HEAP" 1 "pre"} } */
/* { dg-final { cleanup-tree-dump "pre" } } */


@ -1,5 +1,5 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-alias-vops" } */
/* { dg-options "-O2 -fdump-tree-optimized" } */
typedef struct {
int i;
@ -18,5 +18,5 @@ int foo(void)
/* Verify the call clobbers all of a. */
/* { dg-final { scan-tree-dump-times "VDEF <a_" 2 "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */
/* { dg-final { scan-tree-dump-not "return 1;" "optimized" } } */
/* { dg-final { cleanup-tree-dump "optimized" } } */


@ -0,0 +1,24 @@
/* { dg-do compile } */
/* { dg-options "-O -fstrict-aliasing -fdump-tree-optimized" } */
struct A {
int i;
int j;
};
struct B {
struct A a1;
struct A a2;
};
struct C {
struct A a1;
struct B b;
};
int foo(struct C *c, struct B *b)
{
c->a1.i = 1;
b->a1.i = 0;
return c->a1.i;
}
/* { dg-final { scan-tree-dump "return 1;" "optimized" } } */
/* { dg-final { cleanup-tree-dump "optimized" } } */


@ -0,0 +1,26 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fno-tree-ccp -fdump-tree-alias" } */
extern void abort (void);
struct X {
int *p;
int *q;
int *r;
};
int __attribute__((noinline))
foo(int i, int j, int k, int off)
{
struct X x;
int **p, *q;
x.p = &i;
x.q = &j;
x.r = &k;
p = &x.q;
p += 1;
/* *p points to { k } */
q = *p;
return *q;
}
/* { dg-final { scan-tree-dump "q_., points-to vars: { k }" "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */


@ -0,0 +1,26 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fno-tree-ccp -fdump-tree-alias" } */
extern void abort (void);
struct X {
int *p;
int *q;
int *r;
};
int __attribute__((noinline))
foo(int i, int j, int k, int off)
{
struct X x;
int **p, *q;
x.p = &i;
x.q = &j;
x.r = &k;
p = &x.q;
p -= 1;
/* *p points to { i } */
q = *p;
return *q;
}
/* { dg-final { scan-tree-dump "q_., points-to vars: { i }" "alias" } } */
/* { dg-final { cleanup-tree-dump "alias" } } */


@ -0,0 +1,18 @@
/* { dg-do compile } */
/* { dg-options "-O -fdump-tree-cddce1" } */
int foo(int b)
{
int a[128];
a[b] = 1;
if (b)
{
b = 2;
a[2] = 0;
}
a[2] = 3;
return a[2] + b;
}
/* { dg-final { scan-tree-dump-times "a\\\[\[^\n\]\\\]" 2 "cddce1" } } */
/* { dg-final { cleanup-tree-dump "cddce1" } } */


@ -0,0 +1,15 @@
/* { dg-do compile } */
/* { dg-options "-O -fno-tree-sra -fdump-tree-cddce1" } */
struct X { int i; };
struct X foo(int b)
{
struct X x;
if (b)
x.i = 0;
x.i = 1;
return x;
}
/* { dg-final { scan-tree-dump-times "x.i =" 1 "cddce1" } } */
/* { dg-final { cleanup-tree-dump "cddce1" } } */


@ -0,0 +1,12 @@
/* { dg-do compile } */
/* { dg-options "-O -fdump-tree-dse1" } */
void foo (int *p, int b)
{
if (b)
*p = 1;
*p = 0;
}
/* { dg-final { scan-tree-dump-times "\\\*p" 1 "dse1" } } */
/* { dg-final { cleanup-tree-dump "dse1" } } */


@ -18,5 +18,9 @@ void __frame_state_for (volatile char *state_in, int x)
}
}
/* { dg-final { scan-tree-dump "Insertions: 2" "pre" } } */
/* This is a weird testcase. It should need PPRE to hoist the loop
invariants and the volatileness of state_in prevents DSE of the
first store. Thus, this is XFAILed. */
/* { dg-final { scan-tree-dump "Insertions: 2" "pre" { xfail *-*-* } } } */
/* { dg-final { cleanup-tree-dump "pre" } } */


@ -1,9 +1,7 @@
/* { dg-do compile } */
/* { dg-options "-O -fstrict-aliasing -fno-tree-sra --param max-aliased-vops=0 --param max-fields-for-field-sensitive=0 -fdump-tree-fre-details" } */
/* { dg-options "-O -fstrict-aliasing -fno-tree-sra -fdump-tree-fre-details" } */
/* Should be optimized, propagating &a into (*p)[i] with parameters
--param max-aliased-vops=0 --param max-fields-for-field-sensitive=0
which means max 1 VOP per stmt and no SFTs. */
/* Should be optimized, propagating &a into (*p)[i]. */
/* For this testcase we need TBAA to work. */


@ -1,9 +1,7 @@
/* { dg-do compile } */
/* { dg-options "-O -fno-tree-sra --param max-aliased-vops=0 --param max-fields-for-field-sensitive=0 -fdump-tree-fre-details" } */
/* { dg-options "-O -fno-tree-sra -fdump-tree-fre-details" } */
/* Should be optimized, propagating &a into (*p)[i] with parameters
--param max-aliased-vops=0 --param max-fields-for-field-sensitive=0
which means max 1 VOP per stmt and no SFTs. */
/* Should be optimized, propagating &a into (*p)[i]. */
struct Foo
{


@ -1,9 +1,7 @@
/* { dg-do compile } */
/* { dg-options "-O -fno-tree-sra --param max-aliased-vops=0 --param max-fields-for-field-sensitive=0 -fdump-tree-fre-details" } */
/* { dg-options "-O -fno-tree-sra -fdump-tree-fre-details" } */
/* Should be optimized, propagating &a into (*p)[i] with parameters
--param max-aliased-vops=0 --param max-fields-for-field-sensitive=0
which means max 1 VOP per stmt and no SFTs. */
/* Should be optimized, propagating &a into (*p)[i]. */
struct Foo
{


@ -0,0 +1,28 @@
/* { dg-do compile } */
/* { dg-options "-O -fdump-tree-fre" } */
struct a
{
union
{
int a;
int b;
};
union
{
int c;
int d;
};
};
int f(struct a *c)
{
int d = c->a;
c->c = 1;
return c->a + d;
}
/* We should have CSEd the load from c->a. */
/* { dg-final { scan-tree-dump-times "c_.*\\\.a" 1 "fre" } } */
/* { dg-final { cleanup-tree-dump "fre" } } */


@ -0,0 +1,31 @@
/* { dg-do compile } */
/* { dg-options "-O -fdump-tree-fre" } */
struct a
{
union
{
int a;
int b;
};
union
{
int c;
int d;
};
int e;
};
int f(struct a *c)
{
int d;
c->e = 2;
d = c->a;
c->c = 1;
return c->a + d;
}
/* We should have CSEd the load from c->a. */
/* { dg-final { scan-tree-dump-times "c_.*\\\.a" 1 "fre" } } */
/* { dg-final { cleanup-tree-dump "fre" } } */


@ -0,0 +1,20 @@
/* { dg-do compile } */
/* { dg-options "-O -fdump-tree-optimized" } */
int i, j;
int foo(int b)
{
j = 0;
if (b)
goto L2;
L1:
i = i + 1;
L2:
i = i + 1;
if (i == 1)
goto L1;
return j;
}
/* { dg-final { scan-tree-dump "return 0;" "optimized" } } */
/* { dg-final { cleanup-tree-dump "optimized" } } */


@ -1,5 +1,5 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-lim-details" } */
/* { dg-options "-O -fdump-tree-lim-details" } */
struct { int x; int y; } global;
void foo(int n)


@ -0,0 +1,13 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-pre-stats" } */
struct { int x; int y; } global;
void foo(int n)
{
int i;
for ( i=0; i<n; i++)
global.y += global.x*global.x;
}
/* { dg-final { scan-tree-dump "Eliminated: 2" "pre" } } */
/* { dg-final { cleanup-tree-dump "pre" } } */


@ -0,0 +1,20 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-pre" } */
void foo(int *p, double *x, int n)
{
int i;
for (i = 0; i < n; ++i)
*(x + *p * i) = 0.0;
}
/* We should remove the unnecessary insertion of a phi-node and
_not_ end up using the phi result for replacement *p.
The issue here is that when PHI-translating the virtual operands
we assign different value-numbers to the load. Re-running VN
after insertion or trying to be clever and doing this on the
fly during PHI translation would solve this. The next copyprop
fixes this anyway. */
/* { dg-final { scan-tree-dump-not "= prephitmp" "pre" { xfail *-*-* } } } */
/* { dg-final { cleanup-tree-dump "pre" } } */


@ -9,7 +9,7 @@ void
baz (void)
{
int i;
if (i) /* { dg-warning "is used uninitialized" "uninit i warning" } */
if (i) /* { dg-warning "is used uninitialized" "uninit i warning" { xfail *-*-* } } */
bar (i);
foo (&i);
}

View File

@ -29,7 +29,7 @@ void frob(int *pi);
int main(void)
{
int i;
printf("i = %d\n", i); /* { dg-warning "'i' is used uninitialized in this function" } */
printf("i = %d\n", i); /* { dg-warning "'i' is used uninitialized in this function" "" { xfail *-*-* } } */
frob(&i);
return 0;
@ -38,6 +38,6 @@ int main(void)
void foo3(int*);
void bar3(void) {
int x;
if(x) /* { dg-warning "'x' is used uninitialized in this function" "uninitialized" } */
if(x) /* { dg-warning "'x' is used uninitialized in this function" "uninitialized" { xfail *-*-* } } */
foo3(&x);
}

View File

@ -28,7 +28,8 @@ main1 (float *pa)
float pb[N] __attribute__ ((__aligned__(16))) = {0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57};
float pc[N] __attribute__ ((__aligned__(16))) = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19};
/* Not vectorizable: pa may alias pb and/or pc, since their addresses escape. */
/* Vectorizable: pa may not alias pb and/or pc, even though their
addresses escape. &pa would need to escape to point to escaped memory. */
for (i = 0; i < N; i++)
{
pa[i] = pb[i] * pc[i];
@ -74,6 +75,6 @@ int main (void)
return 0;
}
/* { dg-final { scan-tree-dump-times "vectorized 1 loops" 1 "vect" } } */
/* { dg-final { scan-tree-dump-times "vectorized 1 loops" 2 "vect" } } */
/* { dg-final { scan-tree-dump-times "Alignment of access forced using versioning" 1 "vect" { target vect_no_align } } } */
/* { dg-final { cleanup-tree-dump "vect" } } */

View File

@ -69,6 +69,7 @@ DEFTIMEVAR (TV_DF_NOTE , "df reg dead/unused notes")
DEFTIMEVAR (TV_REG_STATS , "register information")
DEFTIMEVAR (TV_ALIAS_ANALYSIS , "alias analysis")
DEFTIMEVAR (TV_ALIAS_STMT_WALK , "alias stmt walking")
DEFTIMEVAR (TV_REG_SCAN , "register scan")
DEFTIMEVAR (TV_REBUILD_JUMP , "rebuild jump labels")
/* Timing in various stages of the compiler. */
@ -87,11 +88,6 @@ DEFTIMEVAR (TV_TREE_COPY_PROP , "tree copy propagation")
DEFTIMEVAR (TV_TREE_STORE_COPY_PROP , "tree store copy prop")
DEFTIMEVAR (TV_FIND_REFERENCED_VARS , "tree find ref. vars")
DEFTIMEVAR (TV_TREE_PTA , "tree PTA")
DEFTIMEVAR (TV_TREE_MAY_ALIAS , "tree alias analysis")
DEFTIMEVAR (TV_CALL_CLOBBER , "tree call clobbering")
DEFTIMEVAR (TV_FLOW_SENSITIVE , "tree flow sensitive alias")
DEFTIMEVAR (TV_FLOW_INSENSITIVE , "tree flow insensitive alias")
DEFTIMEVAR (TV_MEMORY_PARTITIONING , "tree memory partitioning")
DEFTIMEVAR (TV_TREE_INSERT_PHI_NODES , "tree PHI insertion")
DEFTIMEVAR (TV_TREE_SSA_REWRITE_BLOCKS, "tree SSA rewrite")
DEFTIMEVAR (TV_TREE_SSA_OTHER , "tree SSA other")

View File

@ -84,6 +84,7 @@ along with GCC; see the file COPYING3. If not see
#include "tree-mudflap.h"
#include "tree-pass.h"
#include "gimple.h"
#include "tree-ssa-alias.h"
#if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
#include "dwarf2out.h"
@ -2170,6 +2171,8 @@ dump_memory_report (bool final)
dump_bitmap_statistics ();
dump_vec_loc_statistics ();
dump_ggc_loc_statistics (final);
dump_alias_stats (stderr);
dump_pta_stats (stderr);
}
/* Clean up: close opened files, etc. */

View File

@ -906,6 +906,9 @@ tree_call_cdce (void)
{
free_dominance_info (CDI_DOMINATORS);
free_dominance_info (CDI_POST_DOMINATORS);
/* As we introduced new control-flow we need to insert PHI-nodes
for the call-clobbers of the remaining call. */
mark_sym_for_renaming (gimple_vop (cfun));
return (TODO_update_ssa | TODO_cleanup_cfg | TODO_ggc_collect
| TODO_remove_unused_locals);
}

View File

@ -2879,7 +2879,9 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
x = TREE_OPERAND (x, 0))
;
if (TREE_CODE (x) != VAR_DECL && TREE_CODE (x) != PARM_DECL)
if (!(TREE_CODE (x) == VAR_DECL
|| TREE_CODE (x) == PARM_DECL
|| TREE_CODE (x) == RESULT_DECL))
return NULL;
if (!TREE_ADDRESSABLE (x))
{
@ -4969,7 +4971,6 @@ gimple_duplicate_bb (basic_block bb)
operands. */
copy = gimple_copy (stmt);
gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
copy_virtual_operands (copy, stmt);
region = lookup_stmt_eh_region (stmt);
if (region >= 0)
add_stmt_to_eh_region (copy, region);
@ -5141,7 +5142,7 @@ gimple_duplicate_sese_region (edge entry, edge exit,
free_region_copy = true;
}
gcc_assert (!need_ssa_update_p ());
gcc_assert (!need_ssa_update_p (cfun));
/* Record blocks outside the region that are dominated by something
inside. */
@ -5300,7 +5301,7 @@ gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNU
free_region_copy = true;
}
gcc_assert (!need_ssa_update_p ());
gcc_assert (!need_ssa_update_p (cfun));
/* Record blocks outside the region that are dominated by something
inside. */
@ -5627,19 +5628,6 @@ mark_virtual_ops_in_bb (basic_block bb)
mark_virtual_ops_for_renaming (gsi_stmt (gsi));
}
/* Marks virtual operands of all statements in basic blocks BBS for
renaming. */
static void
mark_virtual_ops_in_region (VEC (basic_block,heap) *bbs)
{
basic_block bb;
unsigned i;
for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
mark_virtual_ops_in_bb (bb);
}
/* Move basic block BB from function CFUN to function DEST_FN. The
block is moved out of the original linked list and placed after
block AFTER in the new list. Also, the block is removed from the
@ -5746,7 +5734,7 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
if (old_len <= (unsigned) uid)
{
new_len = 3 * uid / 2;
new_len = 3 * uid / 2 + 1;
VEC_safe_grow_cleared (basic_block, gc,
cfg->x_label_to_block_map, new_len);
}
@ -6008,11 +5996,6 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
pop_cfun ();
/* The ssa form for virtual operands in the source function will have to
be repaired. We do not care for the real operands -- the sese region
must be closed with respect to those. */
mark_virtual_ops_in_region (bbs);
/* Move blocks from BBS into DEST_CFUN. */
gcc_assert (VEC_length (basic_block, bbs) >= 2);
after = dest_cfun->cfg->x_entry_block_ptr;

View File

@ -745,23 +745,6 @@ update_phi_components (basic_block bb)
}
}
/* Mark each virtual op in STMT for ssa update. */
static void
update_all_vops (gimple stmt)
{
ssa_op_iter iter;
tree sym;
FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
{
if (TREE_CODE (sym) == SSA_NAME)
sym = SSA_NAME_VAR (sym);
mark_sym_for_renaming (sym);
}
}
/* Expand a complex move to scalars. */
static void
@ -817,7 +800,6 @@ expand_complex_move (gimple_stmt_iterator *gsi, tree type)
}
else
{
update_all_vops (stmt);
if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
{
r = extract_component (gsi, rhs, 0, true);
@ -860,7 +842,6 @@ expand_complex_move (gimple_stmt_iterator *gsi, tree type)
gimple_return_set_retval (stmt, lhs);
}
update_all_vops (stmt);
update_stmt (stmt);
}
}

View File

@ -792,34 +792,15 @@ dr_analyze_indices (struct data_reference *dr, struct loop *nest)
static void
dr_analyze_alias (struct data_reference *dr)
{
gimple stmt = DR_STMT (dr);
tree ref = DR_REF (dr);
tree base = get_base_address (ref), addr, smt = NULL_TREE;
ssa_op_iter it;
tree op;
bitmap vops;
tree base = get_base_address (ref), addr;
if (DECL_P (base))
smt = base;
else if (INDIRECT_REF_P (base))
if (INDIRECT_REF_P (base))
{
addr = TREE_OPERAND (base, 0);
if (TREE_CODE (addr) == SSA_NAME)
{
smt = symbol_mem_tag (SSA_NAME_VAR (addr));
DR_PTR_INFO (dr) = SSA_NAME_PTR_INFO (addr);
}
DR_PTR_INFO (dr) = SSA_NAME_PTR_INFO (addr);
}
DR_SYMBOL_TAG (dr) = smt;
vops = BITMAP_ALLOC (NULL);
FOR_EACH_SSA_TREE_OPERAND (op, stmt, it, SSA_OP_VIRTUAL_USES)
{
bitmap_set_bit (vops, DECL_UID (SSA_NAME_VAR (op)));
}
DR_VOPS (dr) = vops;
}
/* Returns true if the address of DR is invariant. */
@ -842,7 +823,6 @@ dr_address_invariant_p (struct data_reference *dr)
void
free_data_ref (data_reference_p dr)
{
BITMAP_FREE (DR_VOPS (dr));
VEC_free (tree, heap, DR_ACCESS_FNS (dr));
free (dr);
}
@ -887,8 +867,6 @@ create_data_ref (struct loop *nest, tree memref, gimple stmt, bool is_read)
print_generic_expr (dump_file, DR_ALIGNED_TO (dr), TDF_SLIM);
fprintf (dump_file, "\n\tbase_object: ");
print_generic_expr (dump_file, DR_BASE_OBJECT (dr), TDF_SLIM);
fprintf (dump_file, "\n\tsymbol tag: ");
print_generic_expr (dump_file, DR_SYMBOL_TAG (dr), TDF_SLIM);
fprintf (dump_file, "\n");
}
@ -1238,23 +1216,21 @@ dr_may_alias_p (const struct data_reference *a, const struct data_reference *b)
const_tree type_a, type_b;
const_tree decl_a = NULL_TREE, decl_b = NULL_TREE;
/* If the sets of virtual operands are disjoint, the memory references do not
alias. */
if (!bitmap_intersect_p (DR_VOPS (a), DR_VOPS (b)))
return false;
/* If the accessed objects are disjoint, the memory references do not
alias. */
if (disjoint_objects_p (DR_BASE_OBJECT (a), DR_BASE_OBJECT (b)))
return false;
/* Query the alias oracle. */
if (!refs_may_alias_p (DR_REF (a), DR_REF (b)))
return false;
if (!addr_a || !addr_b)
return true;
/* If the references are based on different static objects, they cannot alias
(PTA should be able to disambiguate such accesses, but often it fails to,
since currently we cannot distinguish between pointer and offset in pointer
arithmetics). */
/* If the references are based on different static objects, they cannot
alias (PTA should be able to disambiguate such accesses, but often
it fails to). */
if (TREE_CODE (addr_a) == ADDR_EXPR
&& TREE_CODE (addr_b) == ADDR_EXPR)
return TREE_OPERAND (addr_a, 0) == TREE_OPERAND (addr_b, 0);
@ -4050,7 +4026,7 @@ get_references_in_stmt (gimple stmt, VEC (data_ref_loc, heap) **references)
&& gimple_asm_volatile_p (stmt)))
clobbers_memory = true;
if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
if (!gimple_vuse (stmt))
return clobbers_memory;
if (stmt_code == GIMPLE_ASSIGN)
@ -4358,7 +4334,6 @@ analyze_all_data_dependences (struct loop *loop)
{
unsigned nb_top_relations = 0;
unsigned nb_bot_relations = 0;
unsigned nb_basename_differ = 0;
unsigned nb_chrec_relations = 0;
struct data_dependence_relation *ddr;
@ -4368,15 +4343,7 @@ analyze_all_data_dependences (struct loop *loop)
nb_top_relations++;
else if (DDR_ARE_DEPENDENT (ddr) == chrec_known)
{
struct data_reference *a = DDR_A (ddr);
struct data_reference *b = DDR_B (ddr);
if (!bitmap_intersect_p (DR_VOPS (a), DR_VOPS (b)))
nb_basename_differ++;
else
nb_bot_relations++;
}
nb_bot_relations++;
else
nb_chrec_relations++;
@ -4939,7 +4906,7 @@ stores_from_loop (struct loop *loop, VEC (gimple, heap) **stmts)
gimple_stmt_iterator bsi;
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
if (!ZERO_SSA_OPERANDS (gsi_stmt (bsi), SSA_OP_VDEF))
if (gimple_vdef (gsi_stmt (bsi)))
VEC_safe_push (gimple, heap, *stmts, gsi_stmt (bsi));
}
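
The two hunks above show the recurring idiom of this patch: with a single virtual operand per statement, passes test gimple_vuse and gimple_vdef directly instead of iterating SSA_OP_VIRTUAL_USES or SSA_OP_VDEF operand lists. A minimal sketch of the idiom; process_load and process_store are hypothetical helpers, not part of this change:

/* Sketch only: under the new scheme a statement carries at most one
   virtual use and one virtual definition.  */
static void
process_stmt (gimple stmt)
{
  if (gimple_vuse (stmt))
    process_load (stmt);	/* STMT may read from memory.  */
  if (gimple_vdef (stmt))
    process_store (stmt);	/* STMT may write to memory.  */
}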

View File

@ -88,7 +88,6 @@ struct dr_alias
{
/* The alias information that should be used for new pointers to this
location. SYMBOL_TAG is either a DECL or a SYMBOL_MEMORY_TAG. */
tree symbol_tag;
struct ptr_info_def *ptr_info;
/* The set of virtual operands corresponding to this memory reference,
@ -204,9 +203,7 @@ struct data_reference
#define DR_OFFSET(DR) (DR)->innermost.offset
#define DR_INIT(DR) (DR)->innermost.init
#define DR_STEP(DR) (DR)->innermost.step
#define DR_SYMBOL_TAG(DR) (DR)->alias.symbol_tag
#define DR_PTR_INFO(DR) (DR)->alias.ptr_info
#define DR_VOPS(DR) (DR)->alias.vops
#define DR_ALIGNED_TO(DR) (DR)->innermost.aligned_to
#define DR_ACCESS_MATRIX(DR) (DR)->access_matrix

View File

@ -254,8 +254,9 @@ dump_referenced_vars (FILE *file)
{
fprintf (file, "Variable: ");
dump_variable (file, var);
fprintf (file, "\n");
}
fprintf (file, "\n");
}
@ -297,12 +298,6 @@ dump_variable (FILE *file, tree var)
fprintf (file, ", ");
print_generic_expr (file, TREE_TYPE (var), dump_flags);
if (ann && ann->symbol_mem_tag)
{
fprintf (file, ", symbol memory tag: ");
print_generic_expr (file, ann->symbol_mem_tag, dump_flags);
}
if (TREE_ADDRESSABLE (var))
fprintf (file, ", is addressable");
@ -312,36 +307,10 @@ dump_variable (FILE *file, tree var)
if (TREE_THIS_VOLATILE (var))
fprintf (file, ", is volatile");
dump_mem_sym_stats_for_var (file, var);
if (is_call_clobbered (var))
{
const char *s = "";
var_ann_t va = var_ann (var);
unsigned int escape_mask = va->escape_mask;
fprintf (file, ", call clobbered");
fprintf (file, " (");
if (escape_mask & ESCAPE_STORED_IN_GLOBAL)
{ fprintf (file, "%sstored in global", s); s = ", "; }
if (escape_mask & ESCAPE_TO_ASM)
{ fprintf (file, "%sgoes through ASM", s); s = ", "; }
if (escape_mask & ESCAPE_TO_CALL)
{ fprintf (file, "%spassed to call", s); s = ", "; }
if (escape_mask & ESCAPE_BAD_CAST)
{ fprintf (file, "%sbad cast", s); s = ", "; }
if (escape_mask & ESCAPE_TO_RETURN)
{ fprintf (file, "%sreturned from func", s); s = ", "; }
if (escape_mask & ESCAPE_TO_PURE_CONST)
{ fprintf (file, "%spassed to pure/const", s); s = ", "; }
if (escape_mask & ESCAPE_IS_GLOBAL)
{ fprintf (file, "%sis global var", s); s = ", "; }
if (escape_mask & ESCAPE_IS_PARM)
{ fprintf (file, "%sis incoming pointer", s); s = ", "; }
if (escape_mask & ESCAPE_UNKNOWN)
{ fprintf (file, "%sunknown escape", s); s = ", "; }
fprintf (file, ")");
}
fprintf (file, ", call clobbered");
else if (is_call_used (var))
fprintf (file, ", call used");
if (ann->noalias_state == NO_ALIAS)
fprintf (file, ", NO_ALIAS (does not alias other NO_ALIAS symbols)");
@ -357,27 +326,6 @@ dump_variable (FILE *file, tree var)
print_generic_expr (file, gimple_default_def (cfun, var), dump_flags);
}
if (MTAG_P (var) && may_aliases (var))
{
fprintf (file, ", may aliases: ");
dump_may_aliases_for (file, var);
}
if (!is_gimple_reg (var))
{
if (memory_partition (var))
{
fprintf (file, ", belongs to partition: ");
print_generic_expr (file, memory_partition (var), dump_flags);
}
if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
{
fprintf (file, ", partition symbols: ");
dump_decl_set (file, MPT_SYMBOLS (var));
}
}
fprintf (file, "\n");
}
@ -516,8 +464,8 @@ collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
gimple stmt = gsi_stmt (si);
dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
dfa_stats_p->num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
dfa_stats_p->num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
dfa_stats_p->num_vdefs += gimple_vdef (stmt) ? 1 : 0;
dfa_stats_p->num_vuses += gimple_vuse (stmt) ? 1 : 0;
}
}
}
@ -650,13 +598,6 @@ add_referenced_var (tree var)
/* Insert VAR into the referenced_vars has table if it isn't present. */
if (referenced_var_check_and_insert (var))
{
/* This is the first time we found this variable, annotate it with
attributes that are intrinsic to the variable. */
/* Tag's don't have DECL_INITIAL. */
if (MTAG_P (var))
return true;
/* Scan DECL_INITIAL for pointer variables as they may contain
address arithmetic referencing the address of other
variables.
@ -684,22 +625,12 @@ remove_referenced_var (tree var)
void **loc;
unsigned int uid = DECL_UID (var);
clear_call_clobbered (var);
bitmap_clear_bit (gimple_call_used_vars (cfun), uid);
if ((v_ann = var_ann (var)))
/* Preserve var_anns of globals. */
if (!is_global_var (var)
&& (v_ann = var_ann (var)))
{
/* Preserve var_anns of globals, but clear their alias info. */
if (MTAG_P (var)
|| (!TREE_STATIC (var) && !DECL_EXTERNAL (var)))
{
ggc_free (v_ann);
var->base.ann = NULL;
}
else
{
v_ann->mpt = NULL_TREE;
v_ann->symbol_mem_tag = NULL_TREE;
}
ggc_free (v_ann);
var->base.ann = NULL;
}
gcc_assert (DECL_P (var));
in.uid = uid;
@ -803,8 +734,6 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
bool seen_variable_array_ref = false;
bool seen_union = false;
gcc_assert (!SSA_VAR_P (exp));
/* First get the final access size from just the outermost expression. */
if (TREE_CODE (exp) == COMPONENT_REF)
size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
@ -984,242 +913,3 @@ stmt_references_abnormal_ssa_name (gimple stmt)
return false;
}
/* Return true, if the two memory references REF1 and REF2 may alias. */
bool
refs_may_alias_p (tree ref1, tree ref2)
{
tree base1, base2;
HOST_WIDE_INT offset1 = 0, offset2 = 0;
HOST_WIDE_INT size1 = -1, size2 = -1;
HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
bool strict_aliasing_applies;
gcc_assert ((SSA_VAR_P (ref1)
|| handled_component_p (ref1)
|| INDIRECT_REF_P (ref1)
|| TREE_CODE (ref1) == TARGET_MEM_REF)
&& (SSA_VAR_P (ref2)
|| handled_component_p (ref2)
|| INDIRECT_REF_P (ref2)
|| TREE_CODE (ref2) == TARGET_MEM_REF));
/* Defer to TBAA if possible. */
if (flag_strict_aliasing
&& !alias_sets_conflict_p (get_alias_set (ref1), get_alias_set (ref2)))
return false;
/* Decompose the references into their base objects and the access. */
base1 = ref1;
if (handled_component_p (ref1))
base1 = get_ref_base_and_extent (ref1, &offset1, &size1, &max_size1);
base2 = ref2;
if (handled_component_p (ref2))
base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &max_size2);
/* If both references are based on different variables, they cannot alias.
If both references are based on the same variable, they cannot alias if
the accesses do not overlap. */
if (SSA_VAR_P (base1)
&& SSA_VAR_P (base2))
{
if (!operand_equal_p (base1, base2, 0))
return false;
return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
}
/* If one base is a ref-all pointer weird things are allowed. */
strict_aliasing_applies = (flag_strict_aliasing
&& (!INDIRECT_REF_P (base1)
|| get_alias_set (base1) != 0)
&& (!INDIRECT_REF_P (base2)
|| get_alias_set (base2) != 0));
/* If strict aliasing applies the only way to access a scalar variable
is through a pointer dereference or through a union (gcc extension). */
if (strict_aliasing_applies
&& ((SSA_VAR_P (ref2)
&& !AGGREGATE_TYPE_P (TREE_TYPE (ref2))
&& !INDIRECT_REF_P (ref1)
&& TREE_CODE (TREE_TYPE (base1)) != UNION_TYPE)
|| (SSA_VAR_P (ref1)
&& !AGGREGATE_TYPE_P (TREE_TYPE (ref1))
&& !INDIRECT_REF_P (ref2)
&& TREE_CODE (TREE_TYPE (base2)) != UNION_TYPE)))
return false;
/* If both references are through the same type, or if strict aliasing
does not apply and they are through two equal pointers, they do not
alias if the accesses do not overlap. */
if ((strict_aliasing_applies
&& (TYPE_MAIN_VARIANT (TREE_TYPE (base1))
== TYPE_MAIN_VARIANT (TREE_TYPE (base2))))
|| (TREE_CODE (base1) == INDIRECT_REF
&& TREE_CODE (base2) == INDIRECT_REF
&& operand_equal_p (TREE_OPERAND (base1, 0),
TREE_OPERAND (base2, 0), 0)))
return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
/* If both are component references through pointers try to find a
common base and apply offset based disambiguation. This handles
for example
struct A { int i; int j; } *q;
struct B { struct A a; int k; } *p;
disambiguating q->i and p->a.j. */
if (strict_aliasing_applies
&& (TREE_CODE (base1) == INDIRECT_REF
|| TREE_CODE (base2) == INDIRECT_REF)
&& handled_component_p (ref1)
&& handled_component_p (ref2))
{
tree *refp;
/* Now search for the type of base1 in the access path of ref2. This
would be a common base for doing offset based disambiguation on. */
refp = &ref2;
while (handled_component_p (*refp)
/* Note that the following is only conservative if there are
never copies of types appearing as sub-structures. */
&& (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
!= TYPE_MAIN_VARIANT (TREE_TYPE (base1))))
refp = &TREE_OPERAND (*refp, 0);
if (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
== TYPE_MAIN_VARIANT (TREE_TYPE (base1)))
{
HOST_WIDE_INT offadj, sztmp, msztmp;
get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
offset2 -= offadj;
return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
}
/* The other way around. */
refp = &ref1;
while (handled_component_p (*refp)
&& (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
!= TYPE_MAIN_VARIANT (TREE_TYPE (base2))))
refp = &TREE_OPERAND (*refp, 0);
if (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
== TYPE_MAIN_VARIANT (TREE_TYPE (base2)))
{
HOST_WIDE_INT offadj, sztmp, msztmp;
get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
offset1 -= offadj;
return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
}
/* If we can be sure to catch all equivalent types in the search
for the common base then we could return false here. In that
case we would be able to disambiguate q->i and p->k. */
}
return true;
}
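
The offset-based disambiguation above reduces to one reusable pattern: decompose each reference with get_ref_base_and_extent and compare the resulting bit ranges. A hedged sketch of that core check; ref1 and ref2 stand for any two memory reference trees:

/* Sketch, not the actual implementation: decompose each reference
   into base plus a bit range [off, off + max_sz) and test overlap.  */
HOST_WIDE_INT off1, sz1, max_sz1, off2, sz2, max_sz2;
tree base1 = get_ref_base_and_extent (ref1, &off1, &sz1, &max_sz1);
tree base2 = get_ref_base_and_extent (ref2, &off2, &sz2, &max_sz2);
if (operand_equal_p (base1, base2, 0)
    && !ranges_overlap_p (off1, max_sz1, off2, max_sz2))
  return false;	/* Same base, disjoint ranges: no alias.  */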
/* Given a stmt STMT that references memory, return the single stmt
that is reached by following the VUSE -> VDEF link. Returns
NULL if there is no single stmt that defines all VUSEs of
STMT.
Note that for a stmt with a single virtual operand this may return
a PHI node as well. Note that if all VUSEs are default definitions
this function will return an empty statement. */
gimple
get_single_def_stmt (gimple stmt)
{
gimple def_stmt = NULL;
tree use;
ssa_op_iter iter;
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_VIRTUAL_USES)
{
gimple tmp = SSA_NAME_DEF_STMT (use);
/* ??? This is too simplistic for multiple virtual operands
reaching different PHI nodes of the same basic block or for
reaching all default definitions. */
if (def_stmt
&& def_stmt != tmp
&& !(gimple_nop_p (def_stmt)
&& gimple_nop_p (tmp)))
return NULL;
def_stmt = tmp;
}
return def_stmt;
}
/* Given a PHI node of virtual operands, try to look through the
loop-carried (cyclic) definitions, provided they do not alias REF,
and return the defining statement of the single virtual operand
that flows in from a non-backedge. Returns NULL if no such
statement can be found. */
gimple
get_single_def_stmt_from_phi (tree ref, gimple phi)
{
tree def_arg = NULL_TREE;
unsigned i;
/* Find the single PHI argument that is not flowing in from a
back edge and verify that the loop-carried definitions do
not alias the reference we look for. */
for (i = 0; i < gimple_phi_num_args (phi); ++i)
{
tree arg = PHI_ARG_DEF (phi, i);
gimple def_stmt;
if (!(gimple_phi_arg_edge (phi, i)->flags & EDGE_DFS_BACK))
{
/* Multiple non-back edges? Do not try to handle this. */
if (def_arg)
return NULL;
def_arg = arg;
continue;
}
/* Follow the definitions back to the original PHI node. Bail
out once a definition is found that may alias REF. */
def_stmt = SSA_NAME_DEF_STMT (arg);
do
{
if (!is_gimple_assign (def_stmt)
|| refs_may_alias_p (ref, gimple_assign_lhs (def_stmt)))
return NULL;
/* ??? This will only work (reaching the PHI node again) if
there is a single virtual operand on def_stmt. */
def_stmt = get_single_def_stmt (def_stmt);
if (!def_stmt)
return NULL;
}
while (def_stmt != phi);
}
return SSA_NAME_DEF_STMT (def_arg);
}
/* Return the single reference statement defining all virtual uses
on STMT or NULL_TREE, if there are multiple defining statements.
Take into account only definitions that alias REF if following
back-edges when looking through a loop PHI node. */
gimple
get_single_def_stmt_with_phi (tree ref, gimple stmt)
{
switch (NUM_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES))
{
case 0:
gcc_unreachable ();
case 1:
{
gimple def_stmt = SSA_NAME_DEF_STMT (SINGLE_SSA_TREE_OPERAND
(stmt, SSA_OP_VIRTUAL_USES));
/* We can handle lookups over PHI nodes only for a single
virtual operand. */
if (gimple_code (def_stmt) == GIMPLE_PHI)
return get_single_def_stmt_from_phi (ref, def_stmt);
return def_stmt;
}
default:
return get_single_def_stmt (stmt);
}
}

View File

@ -510,10 +510,6 @@ dequeue_and_dump (dump_info_p di)
case CONST_DECL:
dump_child ("cnst", DECL_INITIAL (t));
break;
case SYMBOL_MEMORY_TAG:
case NAME_MEMORY_TAG:
break;
case VAR_DECL:
case PARM_DECL:

View File

@ -2824,8 +2824,29 @@ cleanup_empty_eh (basic_block bb)
similar updating as jump threading does. */
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (gsi_stmt (si))));
{
tree res = PHI_RESULT (gsi_stmt (si));
gimple stmt;
imm_use_iterator iter;
use_operand_p use_p;
/* As we are going to delete this block we will release all
defs, which makes the immediate uses on the use stmts invalid.
Avoid that by replacing all uses with the bare variable and
updating the stmts. */
FOR_EACH_IMM_USE_STMT (stmt, iter, res)
{
FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
SET_USE (use_p, SSA_NAME_VAR (res));
update_stmt (stmt);
}
mark_sym_for_renaming (SSA_NAME_VAR (res));
}
/* We want to thread over the current receiver to the next reachable
one. Do so by deleting all outgoing EH edges from all
predecessors of the receiver block we are going to delete and
rebuilding EH edges for them. */
while ((e = ei_safe_edge (ei_start (bb->preds))))
{
basic_block src = e->src;
@ -2843,6 +2864,8 @@ cleanup_empty_eh (basic_block bb)
if (!stmt_can_throw_internal (last_stmt (src)))
continue;
make_eh_edges (last_stmt (src));
/* Make sure to also rename symbols that feed into receivers
that are now newly reachable from current src. */
FOR_EACH_EDGE (e, ei, src->succs)
if (e->flags & EDGE_EH)
{

View File

@ -35,46 +35,6 @@ gimple_in_ssa_p (const struct function *fun)
return fun && fun->gimple_df && fun->gimple_df->in_ssa_p;
}
/* 'true' after aliases have been computed (see compute_may_aliases). */
static inline bool
gimple_aliases_computed_p (const struct function *fun)
{
gcc_assert (fun && fun->gimple_df);
return fun->gimple_df->aliases_computed_p;
}
/* Addressable variables in the function. If bit I is set, then
REFERENCED_VARS (I) has had its address taken. Note that
CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related. An
addressable variable is not necessarily call-clobbered (e.g., a
local addressable whose address does not escape) and not all
call-clobbered variables are addressable (e.g., a local static
variable). */
static inline bitmap
gimple_addressable_vars (const struct function *fun)
{
gcc_assert (fun && fun->gimple_df);
return fun->gimple_df->addressable_vars;
}
/* Call clobbered variables in the function. If bit I is set, then
REFERENCED_VARS (I) is call-clobbered. */
static inline bitmap
gimple_call_clobbered_vars (const struct function *fun)
{
gcc_assert (fun && fun->gimple_df);
return fun->gimple_df->call_clobbered_vars;
}
/* Call-used variables in the function. If bit I is set, then
REFERENCED_VARS (I) is call-used at pure function call-sites. */
static inline bitmap
gimple_call_used_vars (const struct function *fun)
{
gcc_assert (fun && fun->gimple_df);
return fun->gimple_df->call_used_vars;
}
/* Array of all variables referenced in the function. */
static inline htab_t
gimple_referenced_vars (const struct function *fun)
@ -84,14 +44,6 @@ gimple_referenced_vars (const struct function *fun)
return fun->gimple_df->referenced_vars;
}
/* Artificial variable used to model the effects of function calls. */
static inline tree
gimple_global_var (const struct function *fun)
{
gcc_assert (fun && fun->gimple_df);
return fun->gimple_df->global_var;
}
/* Artificial variable used to model the effects of nonlocal
variables. */
static inline tree
@ -101,6 +53,14 @@ gimple_nonlocal_all (const struct function *fun)
return fun->gimple_df->nonlocal_all;
}
/* Artificial variable used for the virtual operand FUD chain. */
static inline tree
gimple_vop (const struct function *fun)
{
gcc_assert (fun && fun->gimple_df);
return fun->gimple_df->vop;
}
/* Initialize the hashtable iterator HTI to point to hashtable TABLE */
static inline void *
@ -263,14 +223,6 @@ ann_type (tree_ann_t ann)
return ann->common.type;
}
/* Return the may_aliases bitmap for variable VAR, or NULL if it has
no may aliases. */
static inline bitmap
may_aliases (const_tree var)
{
return MTAG_ALIASES (var);
}
/* Return the line number for EXPR, or return -1 if we have no line
number information for it. */
static inline int
@ -592,17 +544,27 @@ set_is_used (tree var)
}
/* Return true if T (assumed to be a DECL) is a global variable. */
/* Return true if T (assumed to be a DECL) is a global variable.
A variable is considered global if its storage is not automatic. */
static inline bool
is_global_var (const_tree t)
{
if (MTAG_P (t))
return MTAG_GLOBAL (t);
else
return (TREE_STATIC (t) || DECL_EXTERNAL (t));
return (TREE_STATIC (t) || DECL_EXTERNAL (t));
}
/* Return true if VAR may be aliased. A variable is considered
maybe aliased if its address is taken by the local TU
or possibly by another TU. */
static inline bool
may_be_aliased (const_tree var)
{
return (TREE_PUBLIC (var) || DECL_EXTERNAL (var) || TREE_ADDRESSABLE (var));
}
/* PHI nodes should contain only ssa_names and invariants. A test
for ssa_name is definitely simpler; don't let invalid contents
slip in in the meantime. */
@ -632,77 +594,22 @@ loop_containing_stmt (gimple stmt)
}
/* Return the memory partition tag associated with symbol SYM. */
static inline tree
memory_partition (tree sym)
{
tree tag;
/* MPTs belong to their own partition. */
if (TREE_CODE (sym) == MEMORY_PARTITION_TAG)
return sym;
gcc_assert (!is_gimple_reg (sym));
/* Autoparallelization moves statements from the original function (which has
aliases computed) to the new one (which does not). When rebuilding
operands for the statement in the new function, we do not want to
record the memory partition tags of the original function. */
if (!gimple_aliases_computed_p (cfun))
return NULL_TREE;
tag = get_var_ann (sym)->mpt;
#if defined ENABLE_CHECKING
if (tag)
gcc_assert (TREE_CODE (tag) == MEMORY_PARTITION_TAG);
#endif
return tag;
}
/* Return true if NAME is a memory factoring SSA name (i.e., an SSA
name for a memory partition). */
/* Return true if VAR is clobbered by function calls. */
static inline bool
factoring_name_p (const_tree name)
is_call_clobbered (const_tree var)
{
return TREE_CODE (SSA_NAME_VAR (name)) == MEMORY_PARTITION_TAG;
return (is_global_var (var)
|| (may_be_aliased (var)
&& pt_solution_includes (&cfun->gimple_df->escaped, var)));
}
/* Return true if VAR is used by function calls. */
static inline bool
is_call_used (const_tree var)
{
return (var_ann (var)->call_clobbered
|| bitmap_bit_p (gimple_call_used_vars (cfun), DECL_UID (var)));
}
/* Return true if VAR is clobbered by function calls. */
static inline bool
is_call_clobbered (const_tree var)
{
return var_ann (var)->call_clobbered;
}
/* Mark variable VAR as being clobbered by function calls. */
static inline void
mark_call_clobbered (tree var, unsigned int escape_type)
{
var_ann (var)->escape_mask |= escape_type;
var_ann (var)->call_clobbered = true;
bitmap_set_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
}
/* Clear the call-clobbered attribute from variable VAR. */
static inline void
clear_call_clobbered (tree var)
{
var_ann_t ann = var_ann (var);
ann->escape_mask = 0;
if (MTAG_P (var))
MTAG_GLOBAL (var) = 0;
var_ann (var)->call_clobbered = false;
bitmap_clear_bit (gimple_call_clobbered_vars (cfun), DECL_UID (var));
return (is_call_clobbered (var)
|| (may_be_aliased (var)
&& pt_solution_includes (&cfun->gimple_df->callused, var)));
}
/* Return the common annotation for T. Return NULL if the annotation
@ -751,26 +658,6 @@ op_iter_next_use (ssa_op_iter *ptr)
ptr->uses = ptr->uses->next;
return use_p;
}
if (ptr->vuses)
{
use_p = VUSE_OP_PTR (ptr->vuses, ptr->vuse_index);
if (++(ptr->vuse_index) >= VUSE_NUM (ptr->vuses))
{
ptr->vuse_index = 0;
ptr->vuses = ptr->vuses->next;
}
return use_p;
}
if (ptr->mayuses)
{
use_p = VDEF_OP_PTR (ptr->mayuses, ptr->mayuse_index);
if (++(ptr->mayuse_index) >= VDEF_NUM (ptr->mayuses))
{
ptr->mayuse_index = 0;
ptr->mayuses = ptr->mayuses->next;
}
return use_p;
}
if (ptr->phi_i < ptr->num_phi)
{
return PHI_ARG_DEF_PTR (ptr->phi_stmt, (ptr->phi_i)++);
@ -793,12 +680,6 @@ op_iter_next_def (ssa_op_iter *ptr)
ptr->defs = ptr->defs->next;
return def_p;
}
if (ptr->vdefs)
{
def_p = VDEF_RESULT_PTR (ptr->vdefs);
ptr->vdefs = ptr->vdefs->next;
return def_p;
}
ptr->done = true;
return NULL_DEF_OPERAND_P;
}
@ -817,38 +698,12 @@ op_iter_next_tree (ssa_op_iter *ptr)
ptr->uses = ptr->uses->next;
return val;
}
if (ptr->vuses)
{
val = VUSE_OP (ptr->vuses, ptr->vuse_index);
if (++(ptr->vuse_index) >= VUSE_NUM (ptr->vuses))
{
ptr->vuse_index = 0;
ptr->vuses = ptr->vuses->next;
}
return val;
}
if (ptr->mayuses)
{
val = VDEF_OP (ptr->mayuses, ptr->mayuse_index);
if (++(ptr->mayuse_index) >= VDEF_NUM (ptr->mayuses))
{
ptr->mayuse_index = 0;
ptr->mayuses = ptr->mayuses->next;
}
return val;
}
if (ptr->defs)
{
val = DEF_OP (ptr->defs);
ptr->defs = ptr->defs->next;
return val;
}
if (ptr->vdefs)
{
val = VDEF_RESULT (ptr->vdefs);
ptr->vdefs = ptr->vdefs->next;
return val;
}
ptr->done = true;
return NULL_TREE;
@ -865,34 +720,36 @@ clear_and_done_ssa_iter (ssa_op_iter *ptr)
{
ptr->defs = NULL;
ptr->uses = NULL;
ptr->vuses = NULL;
ptr->vdefs = NULL;
ptr->mayuses = NULL;
ptr->iter_type = ssa_op_iter_none;
ptr->phi_i = 0;
ptr->num_phi = 0;
ptr->phi_stmt = NULL;
ptr->done = true;
ptr->vuse_index = 0;
ptr->mayuse_index = 0;
}
/* Initialize the iterator PTR to the operands in STMT selected by FLAGS. */
static inline void
op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
{
ptr->defs = (flags & SSA_OP_DEF) ? gimple_def_ops (stmt) : NULL;
ptr->uses = (flags & SSA_OP_USE) ? gimple_use_ops (stmt) : NULL;
ptr->vuses = (flags & SSA_OP_VUSE) ? gimple_vuse_ops (stmt) : NULL;
ptr->vdefs = (flags & SSA_OP_VDEF) ? gimple_vdef_ops (stmt) : NULL;
ptr->mayuses = (flags & SSA_OP_VMAYUSE) ? gimple_vdef_ops (stmt) : NULL;
/* We do not support iterating over virtual defs or uses without
iterating over defs or uses at the same time. */
gcc_assert ((!(flags & SSA_OP_VDEF) || (flags & SSA_OP_DEF))
&& (!(flags & SSA_OP_VUSE) || (flags & SSA_OP_USE)));
ptr->defs = (flags & (SSA_OP_DEF|SSA_OP_VDEF)) ? gimple_def_ops (stmt) : NULL;
if (!(flags & SSA_OP_VDEF)
&& ptr->defs
&& gimple_vdef (stmt) != NULL_TREE)
ptr->defs = ptr->defs->next;
ptr->uses = (flags & (SSA_OP_USE|SSA_OP_VUSE)) ? gimple_use_ops (stmt) : NULL;
if (!(flags & SSA_OP_VUSE)
&& ptr->uses
&& gimple_vuse (stmt) != NULL_TREE)
ptr->uses = ptr->uses->next;
ptr->done = false;
ptr->phi_i = 0;
ptr->num_phi = 0;
ptr->phi_stmt = NULL;
ptr->vuse_index = 0;
ptr->mayuse_index = 0;
}
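
Because the virtual operand now sits at the head of the regular def and use lists (op_iter_init above simply skips it when the virtual flags are not requested), existing clients keep the familiar iteration macros and merely pass combined flags. A usage sketch, with process as a hypothetical callback:

/* Sketch: walk both the real and the (at most one) virtual use of STMT.  */
tree op;
ssa_op_iter iter;
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
  process (op);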
/* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
@ -900,7 +757,8 @@ op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
static inline use_operand_p
op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
{
gcc_assert ((flags & SSA_OP_ALL_DEFS) == 0);
gcc_assert ((flags & SSA_OP_ALL_DEFS) == 0
&& (flags & SSA_OP_USE));
op_iter_init (ptr, stmt, flags);
ptr->iter_type = ssa_op_iter_use;
return op_iter_next_use (ptr);
@ -911,7 +769,8 @@ op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
static inline def_operand_p
op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
{
gcc_assert ((flags & SSA_OP_ALL_USES) == 0);
gcc_assert ((flags & SSA_OP_ALL_USES) == 0
&& (flags & SSA_OP_DEF));
op_iter_init (ptr, stmt, flags);
ptr->iter_type = ssa_op_iter_def;
return op_iter_next_def (ptr);
@ -927,58 +786,6 @@ op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
return op_iter_next_tree (ptr);
}
/* Get the next iterator mustdef value for PTR, returning the mustdef values in
KILL and DEF. */
static inline void
op_iter_next_vdef (vuse_vec_p *use, def_operand_p *def,
ssa_op_iter *ptr)
{
#ifdef ENABLE_CHECKING
gcc_assert (ptr->iter_type == ssa_op_iter_vdef);
#endif
if (ptr->mayuses)
{
*def = VDEF_RESULT_PTR (ptr->mayuses);
*use = VDEF_VECT (ptr->mayuses);
ptr->mayuses = ptr->mayuses->next;
return;
}
*def = NULL_DEF_OPERAND_P;
*use = NULL;
ptr->done = true;
return;
}
static inline void
op_iter_next_mustdef (use_operand_p *use, def_operand_p *def,
ssa_op_iter *ptr)
{
vuse_vec_p vp;
op_iter_next_vdef (&vp, def, ptr);
if (vp != NULL)
{
gcc_assert (VUSE_VECT_NUM_ELEM (*vp) == 1);
*use = VUSE_ELEMENT_PTR (*vp, 0);
}
else
*use = NULL_USE_OPERAND_P;
}
/* Initialize iterator PTR to the operands in STMT. Return the first operands
in USE and DEF. */
static inline void
op_iter_init_vdef (ssa_op_iter *ptr, gimple stmt, vuse_vec_p *use,
def_operand_p *def)
{
gcc_assert (gimple_code (stmt) != GIMPLE_PHI);
op_iter_init (ptr, stmt, SSA_OP_VMAYUSE);
ptr->iter_type = ssa_op_iter_vdef;
op_iter_next_vdef (use, def, ptr);
}
/* If there is a single operand in STMT matching FLAGS, return it. Otherwise
return NULL. */
@ -1074,52 +881,6 @@ delink_stmt_imm_use (gimple stmt)
}
/* This routine will compare all the operands matching FLAGS in STMT1 to those
in STMT2. TRUE is returned if they are the same. STMTs can be NULL. */
static inline bool
compare_ssa_operands_equal (gimple stmt1, gimple stmt2, int flags)
{
ssa_op_iter iter1, iter2;
tree op1 = NULL_TREE;
tree op2 = NULL_TREE;
bool look1, look2;
if (stmt1 == stmt2)
return true;
look1 = stmt1 != NULL;
look2 = stmt2 != NULL;
if (look1)
{
op1 = op_iter_init_tree (&iter1, stmt1, flags);
if (!look2)
return op_iter_done (&iter1);
}
else
clear_and_done_ssa_iter (&iter1);
if (look2)
{
op2 = op_iter_init_tree (&iter2, stmt2, flags);
if (!look1)
return op_iter_done (&iter2);
}
else
clear_and_done_ssa_iter (&iter2);
while (!op_iter_done (&iter1) && !op_iter_done (&iter2))
{
if (op1 != op2)
return false;
op1 = op_iter_next_tree (&iter1);
op2 = op_iter_next_tree (&iter2);
}
return (op_iter_done (&iter1) && op_iter_done (&iter2));
}
/* If there is a single DEF in the PHI node which matches FLAG, return it.
Otherwise return NULL_DEF_OPERAND_P. */
static inline tree
@ -1177,11 +938,12 @@ op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
comp = (is_gimple_reg (phi_def) ? SSA_OP_DEF : SSA_OP_VIRTUAL_DEFS);
/* If the PHI node doesn't the operand type we care about, we're done. */
/* If the PHI node doesn't have the operand type we care about,
we're done. */
if ((flags & comp) == 0)
{
ptr->done = true;
return NULL_USE_OPERAND_P;
return NULL_DEF_OPERAND_P;
}
ptr->iter_type = ssa_op_iter_def;
@ -1261,9 +1023,17 @@ link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
}
else
{
FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
if (USE_FROM_PTR (use_p) == use)
last_p = move_use_after_head (use_p, head, last_p);
if (flag == SSA_OP_USE)
{
FOR_EACH_SSA_USE_OPERAND (use_p, head_stmt, op_iter, flag)
if (USE_FROM_PTR (use_p) == use)
last_p = move_use_after_head (use_p, head, last_p);
}
else if ((use_p = gimple_vuse_op (head_stmt)) != NULL_USE_OPERAND_P)
{
if (USE_FROM_PTR (use_p) == use)
last_p = move_use_after_head (use_p, head, last_p);
}
}
/* Link iter node in after last_p. */
if (imm->iter_node.prev != NULL)
@ -1287,7 +1057,7 @@ first_imm_use_stmt (imm_use_iterator *imm, tree var)
imm->iter_node.prev = NULL_USE_OPERAND_P;
imm->iter_node.next = NULL_USE_OPERAND_P;
imm->iter_node.loc.stmt = NULL;
imm->iter_node.use = NULL_USE_OPERAND_P;
imm->iter_node.use = NULL;
if (end_imm_use_stmt_p (imm))
return NULL;
@ -1355,9 +1125,6 @@ unmodifiable_var_p (const_tree var)
if (TREE_CODE (var) == SSA_NAME)
var = SSA_NAME_VAR (var);
if (MTAG_P (var))
return false;
return TREE_READONLY (var) && (TREE_STATIC (var) || DECL_EXTERNAL (var));
}
@ -1414,35 +1181,6 @@ ranges_overlap_p (unsigned HOST_WIDE_INT pos1,
return false;
}
/* Return the memory tag associated with symbol SYM. */
static inline tree
symbol_mem_tag (tree sym)
{
tree tag = get_var_ann (sym)->symbol_mem_tag;
#if defined ENABLE_CHECKING
if (tag)
gcc_assert (TREE_CODE (tag) == SYMBOL_MEMORY_TAG);
#endif
return tag;
}
/* Set the memory tag associated with symbol SYM. */
static inline void
set_symbol_mem_tag (tree sym, tree tag)
{
#if defined ENABLE_CHECKING
if (tag)
gcc_assert (TREE_CODE (tag) == SYMBOL_MEMORY_TAG);
#endif
get_var_ann (sym)->symbol_mem_tag = tag;
}
/* Accessor to tree-ssa-operands.c caches. */
static inline struct ssa_operands *
gimple_ssa_operands (const struct function *fun)
@ -1450,13 +1188,6 @@ gimple_ssa_operands (const struct function *fun)
return &fun->gimple_df->ssa_operands;
}
/* Map describing reference statistics for function FN. */
static inline struct mem_ref_stats_d *
gimple_mem_ref_stats (const struct function *fn)
{
return &fn->gimple_df->mem_ref_stats;
}
/* Given an edge_var_map V, return the PHI arg definition. */
static inline tree

View File

@ -30,6 +30,7 @@ along with GCC; see the file COPYING3. If not see
#include "tree-ssa-operands.h"
#include "cgraph.h"
#include "ipa-reference.h"
#include "tree-ssa-alias.h"
/* Forward declare structures for the garbage collector GTY markers. */
#ifndef GCC_BASIC_BLOCK_H
@ -40,98 +41,6 @@ typedef struct basic_block_def *basic_block;
#endif
struct static_var_ann_d;
/* The reasons a variable may escape a function. */
enum escape_type
{
NO_ESCAPE = 0, /* Doesn't escape. */
ESCAPE_STORED_IN_GLOBAL = 1 << 0,
ESCAPE_TO_ASM = 1 << 1, /* Passed by address to an assembly
statement. */
ESCAPE_TO_CALL = 1 << 2, /* Escapes to a function call. */
ESCAPE_BAD_CAST = 1 << 3, /* Cast from pointer to integer */
ESCAPE_TO_RETURN = 1 << 4, /* Returned from function. */
ESCAPE_TO_PURE_CONST = 1 << 5, /* Escapes to a pure or constant
function call. */
ESCAPE_IS_GLOBAL = 1 << 6, /* Is a global variable. */
ESCAPE_IS_PARM = 1 << 7, /* Is an incoming function argument. */
ESCAPE_UNKNOWN = 1 << 8 /* We believe it escapes for
some reason not enumerated
above. */
};
/* Memory reference statistics for individual memory symbols,
collected during alias analysis. */
struct mem_sym_stats_d GTY(())
{
/* Memory symbol. */
tree var;
/* Nonzero if this entry has been assigned a partition. */
unsigned int partitioned_p : 1;
/* Nonzero if VAR is a memory partition tag that already contains
call-clobbered variables in its partition set. */
unsigned int has_call_clobbered_vars : 1;
/* Number of direct reference sites. A direct reference to VAR is any
reference of the form 'VAR = ' or ' = VAR'. For GIMPLE reg
pointers, this is the number of sites where the pointer is
dereferenced. */
long num_direct_writes;
long num_direct_reads;
/* Number of indirect reference sites. An indirect reference to VAR
is any reference via a pointer that contains VAR in its points-to
set or, in the case of call-clobbered symbols, a function call. */
long num_indirect_writes;
long num_indirect_reads;
/* Execution frequency. This is the sum of the execution
frequencies of all the statements that reference this object
weighted by the number of references in each statement. This is
the main key used to sort the list of symbols to partition.
Symbols with high execution frequencies are put at the bottom of
the work list (ie, they are partitioned last).
Execution frequencies are taken directly from each basic block,
so compiling with PGO enabled will increase the precision of this
estimate. */
long frequency_reads;
long frequency_writes;
/* Set of memory tags that contain VAR in their alias set. */
bitmap parent_tags;
};
typedef struct mem_sym_stats_d *mem_sym_stats_t;
DEF_VEC_P(mem_sym_stats_t);
DEF_VEC_ALLOC_P(mem_sym_stats_t, heap);
/* Memory reference statistics collected during alias analysis. */
struct mem_ref_stats_d GTY(())
{
/* Number of statements that make memory references. */
long num_mem_stmts;
/* Number of statements that make function calls. */
long num_call_sites;
/* Number of statements that make calls to pure/const functions. */
long num_pure_const_call_sites;
/* Number of ASM statements. */
long num_asm_sites;
/* Estimated number of virtual operands needed as computed by
compute_memory_partitions. */
long num_vuses;
long num_vdefs;
/* This maps every symbol used to make "memory" references
(pointers, arrays, structures, etc) to an instance of struct
mem_sym_stats_d describing reference statistics for the symbol. */
struct pointer_map_t * GTY((skip)) mem_sym_stats;
};
/* Gimple dataflow datastructure. All publicly available fields shall have
gimple_ accessor defined in tree-flow-inline.h, all publicly modifiable
@ -151,29 +60,18 @@ struct gimple_df GTY(())
/* Array of all SSA_NAMEs used in the function. */
VEC(tree,gc) *ssa_names;
/* Artificial variable used to model the effects of function calls. */
tree global_var;
/* Artificial variable used for the virtual operand FUD chain. */
tree vop;
/* Artificial variable used to model the effects of nonlocal
variables. */
tree nonlocal_all;
/* Call clobbered variables in the function. If bit I is set, then
REFERENCED_VARS (I) is call-clobbered. */
bitmap call_clobbered_vars;
/* The PTA solution for the ESCAPED artificial variable. */
struct pt_solution escaped;
/* Call-used variables in the function. If bit I is set, then
REFERENCED_VARS (I) is call-used at pure function call-sites. */
bitmap call_used_vars;
/* Addressable variables in the function. If bit I is set, then
REFERENCED_VARS (I) has had its address taken. Note that
CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related. An
addressable variable is not necessarily call-clobbered (e.g., a
local addressable whose address does not escape) and not all
call-clobbered variables are addressable (e.g., a local static
variable). */
bitmap addressable_vars;
/* The PTA solution for the CALLUSED artificial variable. */
struct pt_solution callused;
/* Free list of SSA_NAMEs. */
tree free_ssanames;
@ -184,18 +82,14 @@ struct gimple_df GTY(())
for this variable with an empty defining statement. */
htab_t GTY((param_is (union tree_node))) default_defs;
/* 'true' after aliases have been computed (see compute_may_aliases). */
unsigned int aliases_computed_p : 1;
/* Symbols whose SSA form needs to be updated or created for the first
time. */
bitmap syms_to_rename;
/* True if the code is in ssa form. */
unsigned int in_ssa_p : 1;
struct ssa_operands ssa_operands;
/* Memory reference statistics collected during alias analysis.
This information is used to drive the memory partitioning
heuristics in compute_memory_partitions. */
struct mem_ref_stats_d mem_ref_stats;
};
/* Accessors for internal use only. Generic code should use abstraction
@ -204,6 +98,7 @@ struct gimple_df GTY(())
#define SSANAMES(fun) (fun)->gimple_df->ssa_names
#define MODIFIED_NORETURN_CALLS(fun) (fun)->gimple_df->modified_noreturn_calls
#define DEFAULT_DEFS(fun) (fun)->gimple_df->default_defs
#define SYMS_TO_RENAME(fun) (fun)->gimple_df->syms_to_rename
typedef struct
{
@ -231,37 +126,8 @@ typedef struct
/* Aliasing information for SSA_NAMEs representing pointer variables. */
struct ptr_info_def GTY(())
{
/* Mask of reasons this pointer's value escapes the function. */
ENUM_BITFIELD (escape_type) escape_mask : 9;
/* Nonzero if points-to analysis couldn't determine where this pointer
is pointing to. */
unsigned int pt_anything : 1;
/* Nonzero if the value of this pointer escapes the current function. */
unsigned int value_escapes_p : 1;
/* Nonzero if a memory tag is needed for this pointer. This is
true if this pointer is eventually dereferenced. */
unsigned int memory_tag_needed : 1;
/* Nonzero if this pointer is really dereferenced. */
unsigned int is_dereferenced : 1;
/* Nonzero if this pointer points to a global variable. */
unsigned int pt_global_mem : 1;
/* Nonzero if this pointer points to NULL. */
unsigned int pt_null : 1;
/* Set of variables that this pointer may point to. */
bitmap pt_vars;
/* If this pointer has been dereferenced, and points-to information is
more precise than type-based aliasing, indirect references to this
pointer will be represented by this memory tag, instead of the type
tag computed by TBAA. */
tree name_mem_tag;
/* The points-to solution, TBAA-pruned if the pointer is dereferenced. */
struct pt_solution pt;
};
@ -359,42 +225,15 @@ struct var_ann_d GTY(())
states. */
ENUM_BITFIELD (need_phi_state) need_phi_state : 2;
/* Used during operand processing to determine if this variable is already
in the VUSE list. */
unsigned in_vuse_list : 1;
/* Used during operand processing to determine if this variable is already
in the VDEF list. */
unsigned in_vdef_list : 1;
/* True for HEAP artificial variables. These variables represent
the memory area allocated by a call to malloc. */
unsigned is_heapvar : 1;
/* True if the variable is call clobbered. */
unsigned call_clobbered : 1;
/* This field describes several "no alias" attributes that some
symbols are known to have. See the enum's definition for more
information on each attribute. */
ENUM_BITFIELD (noalias_state) noalias_state : 2;
/* Mask of values saying the reasons why this variable has escaped
the function. */
ENUM_BITFIELD (escape_type) escape_mask : 9;
/* Memory partition tag assigned to this symbol. */
tree mpt;
/* If this variable is a pointer P that has been dereferenced, this
field is an artificial variable that represents the memory
location *P. Every other pointer Q that is type-compatible with
P will also have the same memory tag. If the variable is not a
pointer or if it is never dereferenced, this must be NULL.
FIXME, do we really need this here? How much slower would it be
to convert to hash table? */
tree symbol_mem_tag;
/* Used when going out of SSA form to indicate which partition this
variable represents storage for. */
unsigned partition;
@ -535,7 +374,6 @@ static inline function_ann_t function_ann (const_tree);
static inline function_ann_t get_function_ann (tree);
static inline enum tree_ann_type ann_type (tree_ann_t);
static inline void update_stmt (gimple);
static inline bitmap may_aliases (const_tree);
static inline int get_lineno (const_gimple);
/*---------------------------------------------------------------------------
@ -776,10 +614,8 @@ extern tree make_rename_temp (tree, const char *);
extern void set_default_def (tree, tree);
extern tree gimple_default_def (struct function *, tree);
extern bool stmt_references_abnormal_ssa_name (gimple);
extern bool refs_may_alias_p (tree, tree);
extern gimple get_single_def_stmt (gimple);
extern gimple get_single_def_stmt_from_phi (tree, gimple);
extern gimple get_single_def_stmt_with_phi (tree, gimple);
extern tree get_ref_base_and_extent (tree, HOST_WIDE_INT *,
HOST_WIDE_INT *, HOST_WIDE_INT *);
/* In tree-phinodes.c */
extern void reserve_phi_args_for_new_edge (basic_block);
@ -804,43 +640,6 @@ extern bool block_may_fallthru (const_tree);
extern bool gimple_seq_may_fallthru (gimple_seq);
extern bool gimple_stmt_may_fallthru (gimple);
/* In tree-ssa-alias.c */
extern unsigned int compute_may_aliases (void);
extern void dump_may_aliases_for (FILE *, tree);
extern void debug_may_aliases_for (tree);
extern void dump_alias_info (FILE *);
extern void debug_alias_info (void);
extern void dump_points_to_info (FILE *);
extern void debug_points_to_info (void);
extern void dump_points_to_info_for (FILE *, tree);
extern void debug_points_to_info_for (tree);
extern bool may_be_aliased (tree);
extern bool may_alias_p (tree, alias_set_type, tree, alias_set_type, bool);
extern struct ptr_info_def *get_ptr_info (tree);
extern bool may_point_to_global_var (tree);
extern void new_type_alias (tree, tree, tree);
extern void count_uses_and_derefs (tree, gimple, unsigned *, unsigned *,
unsigned *);
static inline bool ref_contains_array_ref (const_tree);
static inline bool array_ref_contains_indirect_ref (const_tree);
extern tree get_ref_base_and_extent (tree, HOST_WIDE_INT *,
HOST_WIDE_INT *, HOST_WIDE_INT *);
extern tree create_tag_raw (enum tree_code, tree, const char *);
extern void delete_mem_ref_stats (struct function *);
extern void dump_mem_ref_stats (FILE *);
extern void debug_mem_ref_stats (void);
extern void debug_memory_partitions (void);
extern void debug_mem_sym_stats (tree var);
extern void dump_mem_sym_stats_for_var (FILE *, tree);
extern void debug_all_mem_sym_stats (void);
/* Call-back function for walk_use_def_chains(). At each reaching
definition, a function with this prototype is called. */
typedef bool (*walk_use_def_chains_fn) (tree, gimple, void *);
/* In tree-ssa-alias-warnings.c */
extern void strict_aliasing_warning_backend (void);
/* In tree-ssa.c */
@ -869,8 +668,14 @@ extern edge ssa_redirect_edge (edge, basic_block);
extern void flush_pending_stmts (edge);
extern void verify_ssa (bool);
extern void delete_tree_ssa (void);
extern void walk_use_def_chains (tree, walk_use_def_chains_fn, void *, bool);
extern bool ssa_undefined_value_p (tree);
extern void execute_update_addresses_taken (bool);
/* Call-back function for walk_use_def_chains(). At each reaching
definition, a function with this prototype is called. */
typedef bool (*walk_use_def_chains_fn) (tree, gimple, void *);
extern void walk_use_def_chains (tree, walk_use_def_chains_fn, void *, bool);
/* In tree-into-ssa.c */
@ -878,7 +683,7 @@ void update_ssa (unsigned);
void delete_update_ssa (void);
void register_new_name_mapping (tree, tree);
tree create_new_def_for (tree, gimple, def_operand_p);
bool need_ssa_update_p (void);
bool need_ssa_update_p (struct function *);
bool name_mappings_registered_p (void);
bool name_registered_for_update_p (tree);
bitmap ssa_names_to_replace (void);
@ -1065,9 +870,10 @@ char *get_lsm_tmp_name (tree, unsigned);
/* In tree-flow-inline.h */
static inline bool is_call_clobbered (const_tree);
static inline void mark_call_clobbered (tree, unsigned int);
static inline void set_is_used (tree);
static inline bool unmodifiable_var_p (const_tree);
static inline bool ref_contains_array_ref (const_tree);
static inline bool array_ref_contains_indirect_ref (const_tree);
/* In tree-eh.c */
extern void make_eh_edges (gimple);
@ -1149,11 +955,6 @@ tree force_gimple_operand_gsi (gimple_stmt_iterator *, tree, bool, tree,
tree gimple_fold_indirect_ref (tree);
void mark_addressable (tree);
/* In tree-ssa-structalias.c */
bool find_what_p_points_to (tree);
bool clobber_what_escaped (void);
void compute_call_used_vars (void);
/* In tree-ssa-live.c */
extern void remove_unused_locals (void);
extern void dump_scope_blocks (FILE *, int);
@ -1174,8 +975,6 @@ rtx addr_for_mem_ref (struct mem_address *, bool);
void get_address_description (tree, struct mem_address *);
tree maybe_fold_tmr (tree);
void init_alias_heapvars (void);
void delete_alias_heapvars (void);
unsigned int execute_fixup_cfg (void);
#include "tree-flow-inline.h"

View File

@ -1297,6 +1297,14 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
else
walk_gimple_op (copy, remap_gimple_op_r, &wi);
/* Clear the copied virtual operands. We are not remapping them here
but are going to recreate them from scratch. */
if (gimple_has_mem_ops (copy))
{
gimple_set_vdef (copy, NULL_TREE);
gimple_set_vuse (copy, NULL_TREE);
}
/* We have to handle EH region remapping of GIMPLE_RESX specially because
the region number is not an operand. */
if (gimple_code (stmt) == GIMPLE_RESX && id->eh_region_offset)
@ -3410,6 +3418,9 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
pointer_map_destroy (id->decl_map);
id->decl_map = st;
/* Unlink the call's virtual operands before replacing it. */
unlink_stmt_vdef (stmt);
/* If the inlined function returns a result that we care about,
substitute the GIMPLE_CALL with an assignment of the return
variable to the LHS of the call. That is, if STMT was
@ -3420,10 +3431,7 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
gsi_replace (&stmt_gsi, stmt, false);
if (gimple_in_ssa_p (cfun))
{
update_stmt (stmt);
mark_symbols_for_renaming (stmt);
}
mark_symbols_for_renaming (stmt);
maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
}
else
@ -3443,7 +3451,6 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
undefined via a move. */
stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
gsi_replace (&stmt_gsi, stmt, true);
update_stmt (stmt);
}
else
{
@ -4451,28 +4458,16 @@ tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
/* Clean up. */
pointer_map_destroy (id.decl_map);
free_dominance_info (CDI_DOMINATORS);
free_dominance_info (CDI_POST_DOMINATORS);
if (!update_clones)
{
fold_marked_statements (0, id.statements_to_fold);
pointer_set_destroy (id.statements_to_fold);
fold_cond_expr_cond ();
}
if (gimple_in_ssa_p (cfun))
{
free_dominance_info (CDI_DOMINATORS);
free_dominance_info (CDI_POST_DOMINATORS);
if (!update_clones)
delete_unreachable_blocks ();
delete_unreachable_blocks ();
update_ssa (TODO_update_ssa);
if (!update_clones)
{
fold_cond_expr_cond ();
if (need_ssa_update_p ())
update_ssa (TODO_update_ssa);
}
}
free_dominance_info (CDI_DOMINATORS);
free_dominance_info (CDI_POST_DOMINATORS);
VEC_free (gimple, heap, init_stmts);
pop_cfun ();
current_function_decl = old_current_function_decl;

View File

@ -113,10 +113,6 @@ static sbitmap old_ssa_names;
static sbitmap new_ssa_names;
/* Symbols whose SSA form needs to be updated or created for the first
time. */
static bitmap syms_to_rename;
/* Subset of SYMS_TO_RENAME. Contains all the GIMPLE register symbols
that have been marked for renaming. */
static bitmap regs_to_rename;
@ -155,12 +151,9 @@ struct repl_map_d
then REPL_TBL[N_i] = { O_1, O_2, ..., O_j }. */
static htab_t repl_tbl;
/* true if register_new_name_mapping needs to initialize the data
structures needed by update_ssa. */
static bool need_to_initialize_update_ssa_p = true;
/* true if update_ssa needs to update virtual operands. */
static bool need_to_update_vops_p = false;
/* The function the SSA updating data structures have been initialized for.
NULL if they need to be initialized by register_new_name_mapping. */
static struct function *update_ssa_initialized_fn = NULL;
/* Statistics kept by update_ssa to use in the virtual mapping
heuristic. If the number of virtual mappings is beyond certain
@ -585,7 +578,7 @@ set_livein_block (tree var, basic_block bb)
static inline bool
symbol_marked_for_renaming (tree sym)
{
return bitmap_bit_p (syms_to_rename, DECL_UID (sym));
return bitmap_bit_p (SYMS_TO_RENAME (cfun), DECL_UID (sym));
}
@ -595,6 +588,8 @@ static inline bool
is_old_name (tree name)
{
unsigned ver = SSA_NAME_VERSION (name);
if (!new_ssa_names)
return false;
return ver < new_ssa_names->n_bits && TEST_BIT (old_ssa_names, ver);
}
@ -605,6 +600,8 @@ static inline bool
is_new_name (tree name)
{
unsigned ver = SSA_NAME_VERSION (name);
if (!new_ssa_names)
return false;
return ver < new_ssa_names->n_bits && TEST_BIT (new_ssa_names, ver);
}
@ -695,8 +692,6 @@ add_new_name_mapping (tree new_tree, tree old)
{
tree sym;
need_to_update_vops_p = true;
update_ssa_stats.num_virtual_mappings++;
update_ssa_stats.num_virtual_symbols++;
@ -1455,10 +1450,10 @@ dump_decl_set (FILE *file, bitmap set)
fprintf (file, " ");
}
fprintf (file, "}\n");
fprintf (file, "}");
}
else
fprintf (file, "NIL\n");
fprintf (file, "NIL");
}
@ -1468,6 +1463,7 @@ void
debug_decl_set (bitmap set)
{
dump_decl_set (stderr, set);
fprintf (stderr, "\n");
}
@ -1551,7 +1547,8 @@ dump_currdefs (FILE *file)
fprintf (file, "\n\nCurrent reaching definitions\n\n");
FOR_EACH_REFERENCED_VAR (var, i)
if (syms_to_rename == NULL || bitmap_bit_p (syms_to_rename, DECL_UID (var)))
if (SYMS_TO_RENAME (cfun) == NULL
|| bitmap_bit_p (SYMS_TO_RENAME (cfun), DECL_UID (var)))
{
fprintf (file, "CURRDEF (");
print_generic_expr (file, var, 0);
@ -1943,27 +1940,15 @@ rewrite_update_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
/* Rewrite USES included in OLD_SSA_NAMES and USES whose underlying
symbol is marked for renaming. */
if (rewrite_uses_p (stmt))
{
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
maybe_replace_use (use_p);
if (need_to_update_vops_p)
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_VIRTUAL_USES)
maybe_replace_use (use_p);
}
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
maybe_replace_use (use_p);
/* Register definitions of names in NEW_SSA_NAMES and OLD_SSA_NAMES.
Also register definitions for names whose underlying symbol is
marked for renaming. */
if (register_defs_p (stmt))
{
FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
maybe_register_def (def_p, stmt);
if (need_to_update_vops_p)
FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_VIRTUAL_DEFS)
maybe_register_def (def_p, stmt);
}
FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_ALL_DEFS)
maybe_register_def (def_p, stmt);
}
@ -2293,6 +2278,7 @@ struct gimple_opt_pass pass_build_ssa =
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func
| TODO_update_ssa_only_virtuals
| TODO_verify_ssa
| TODO_remove_unused_locals /* todo_flags_finish */
}
@ -2577,7 +2563,7 @@ dump_update_ssa (FILE *file)
unsigned i = 0;
bitmap_iterator bi;
if (!need_ssa_update_p ())
if (!need_ssa_update_p (cfun))
return;
if (new_ssa_names && sbitmap_first_set_bit (new_ssa_names) >= 0)
@ -2604,10 +2590,11 @@ dump_update_ssa (FILE *file)
update_ssa_stats.num_virtual_symbols);
}
if (syms_to_rename && !bitmap_empty_p (syms_to_rename))
if (!bitmap_empty_p (SYMS_TO_RENAME (cfun)))
{
fprintf (file, "\n\nSymbols to be put in SSA form\n\n");
dump_decl_set (file, syms_to_rename);
dump_decl_set (file, SYMS_TO_RENAME (cfun));
fprintf (file, "\n");
}
if (names_to_release && !bitmap_empty_p (names_to_release))
@ -2636,7 +2623,7 @@ debug_update_ssa (void)
/* Initialize data structures used for incremental SSA updates. */
static void
init_update_ssa (void)
init_update_ssa (struct function *fn)
{
/* Reserve more space than the current number of names. The calls to
add_new_name_mapping are typically done after creating new SSA
@ -2648,14 +2635,12 @@ init_update_ssa (void)
sbitmap_zero (new_ssa_names);
repl_tbl = htab_create (20, repl_map_hash, repl_map_eq, repl_map_free);
need_to_initialize_update_ssa_p = false;
need_to_update_vops_p = false;
syms_to_rename = BITMAP_ALLOC (NULL);
regs_to_rename = BITMAP_ALLOC (NULL);
mem_syms_to_rename = BITMAP_ALLOC (NULL);
names_to_release = NULL;
memset (&update_ssa_stats, 0, sizeof (update_ssa_stats));
update_ssa_stats.virtual_symbols = BITMAP_ALLOC (NULL);
update_ssa_initialized_fn = fn;
}
@ -2676,9 +2661,7 @@ delete_update_ssa (void)
htab_delete (repl_tbl);
repl_tbl = NULL;
need_to_initialize_update_ssa_p = true;
need_to_update_vops_p = false;
BITMAP_FREE (syms_to_rename);
bitmap_clear (SYMS_TO_RENAME (update_ssa_initialized_fn));
BITMAP_FREE (regs_to_rename);
BITMAP_FREE (mem_syms_to_rename);
BITMAP_FREE (update_ssa_stats.virtual_symbols);
@ -2705,6 +2688,7 @@ delete_update_ssa (void)
BITMAP_FREE (blocks_with_phis_to_rewrite);
BITMAP_FREE (blocks_to_update);
update_ssa_initialized_fn = NULL;
}
@ -2751,12 +2735,14 @@ create_new_def_for (tree old_name, gimple stmt, def_operand_p def)
update_ssa. */
void
register_new_name_mapping (tree new_Tree ATTRIBUTE_UNUSED, tree old ATTRIBUTE_UNUSED)
register_new_name_mapping (tree new_tree, tree old)
{
if (need_to_initialize_update_ssa_p)
init_update_ssa ();
if (!update_ssa_initialized_fn)
init_update_ssa (cfun);
add_new_name_mapping (new_Tree, old);
gcc_assert (update_ssa_initialized_fn == cfun);
add_new_name_mapping (new_tree, old);
}
@ -2765,17 +2751,7 @@ register_new_name_mapping (tree new_Tree ATTRIBUTE_UNUSED, tree old ATTRIBUTE_UN
void
mark_sym_for_renaming (tree sym)
{
if (need_to_initialize_update_ssa_p)
init_update_ssa ();
bitmap_set_bit (syms_to_rename, DECL_UID (sym));
if (!is_gimple_reg (sym))
{
need_to_update_vops_p = true;
if (memory_partition (sym))
bitmap_set_bit (syms_to_rename, DECL_UID (memory_partition (sym)));
}
bitmap_set_bit (SYMS_TO_RENAME (cfun), DECL_UID (sym));
}
@ -2790,20 +2766,21 @@ mark_set_for_renaming (bitmap set)
if (set == NULL || bitmap_empty_p (set))
return;
if (need_to_initialize_update_ssa_p)
init_update_ssa ();
EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
mark_sym_for_renaming (referenced_var (i));
}
/* Return true if there is any work to be done by update_ssa. */
/* Return true if there is any work to be done by update_ssa
for function FN. */
bool
need_ssa_update_p (void)
need_ssa_update_p (struct function *fn)
{
return syms_to_rename || old_ssa_names || new_ssa_names;
gcc_assert (fn != NULL);
return (update_ssa_initialized_fn == fn
|| (fn->gimple_df
&& !bitmap_empty_p (SYMS_TO_RENAME (fn))));
}
/* Return true if SSA name mappings have been registered for SSA updating. */
@ -2811,6 +2788,11 @@ need_ssa_update_p (void)
bool
name_mappings_registered_p (void)
{
if (!update_ssa_initialized_fn)
return false;
gcc_assert (update_ssa_initialized_fn == cfun);
return repl_tbl && htab_elements (repl_tbl) > 0;
}
@ -2819,12 +2801,12 @@ name_mappings_registered_p (void)
bool
name_registered_for_update_p (tree n ATTRIBUTE_UNUSED)
{
if (!need_ssa_update_p ())
if (!update_ssa_initialized_fn)
return false;
return is_new_name (n)
|| is_old_name (n)
|| symbol_marked_for_renaming (SSA_NAME_VAR (n));
gcc_assert (update_ssa_initialized_fn == cfun);
return is_new_name (n) || is_old_name (n);
}
@ -2837,6 +2819,9 @@ ssa_names_to_replace (void)
bitmap ret;
sbitmap_iterator sbi;
gcc_assert (update_ssa_initialized_fn == NULL
|| update_ssa_initialized_fn == cfun);
ret = BITMAP_ALLOC (NULL);
EXECUTE_IF_SET_IN_SBITMAP (old_ssa_names, 0, i, sbi)
bitmap_set_bit (ret, i);
@ -2850,7 +2835,7 @@ ssa_names_to_replace (void)
void
release_ssa_name_after_update_ssa (tree name)
{
gcc_assert (!need_to_initialize_update_ssa_p);
gcc_assert (cfun && update_ssa_initialized_fn == cfun);
if (names_to_release == NULL)
names_to_release = BITMAP_ALLOC (NULL);
@ -3110,11 +3095,15 @@ update_ssa (unsigned update_flags)
bool insert_phi_p;
sbitmap_iterator sbi;
if (!need_ssa_update_p ())
if (!need_ssa_update_p (cfun))
return;
timevar_push (TV_TREE_SSA_INCREMENTAL);
if (!update_ssa_initialized_fn)
init_update_ssa (cfun);
gcc_assert (update_ssa_initialized_fn == cfun);
blocks_with_phis_to_rewrite = BITMAP_ALLOC (NULL);
if (!phis_to_rewrite)
phis_to_rewrite = VEC_alloc (gimple_vec, heap, last_basic_block);
@ -3166,30 +3155,21 @@ update_ssa (unsigned update_flags)
/* If there are symbols to rename, identify those symbols that are
GIMPLE registers into the set REGS_TO_RENAME and those that are
memory symbols into the set MEM_SYMS_TO_RENAME. */
if (!bitmap_empty_p (syms_to_rename))
if (!bitmap_empty_p (SYMS_TO_RENAME (cfun)))
{
unsigned i;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (syms_to_rename, 0, i, bi)
EXECUTE_IF_SET_IN_BITMAP (SYMS_TO_RENAME (cfun), 0, i, bi)
{
tree sym = referenced_var (i);
if (is_gimple_reg (sym))
bitmap_set_bit (regs_to_rename, i);
else
{
/* Memory partitioning information may have been
computed after the symbol was marked for renaming,
if SYM is inside a partition also mark the partition
for renaming. */
tree mpt = memory_partition (sym);
if (mpt)
bitmap_set_bit (syms_to_rename, DECL_UID (mpt));
}
}
/* Memory symbols are those not in REGS_TO_RENAME. */
bitmap_and_compl (mem_syms_to_rename, syms_to_rename, regs_to_rename);
bitmap_and_compl (mem_syms_to_rename,
SYMS_TO_RENAME (cfun), regs_to_rename);
}
/* If there are names defined in the replacement table, prepare
@ -3203,12 +3183,12 @@ update_ssa (unsigned update_flags)
removal, and there are no symbols to rename, then there's
nothing else to do. */
if (sbitmap_first_set_bit (new_ssa_names) < 0
&& bitmap_empty_p (syms_to_rename))
&& bitmap_empty_p (SYMS_TO_RENAME (cfun)))
goto done;
}
/* Next, determine the block at which to start the renaming process. */
if (!bitmap_empty_p (syms_to_rename))
if (!bitmap_empty_p (SYMS_TO_RENAME (cfun)))
{
/* If we have to rename some symbols from scratch, we need to
start the process at the root of the CFG. FIXME, it should
@ -3262,7 +3242,7 @@ update_ssa (unsigned update_flags)
sbitmap_free (tmp);
}
EXECUTE_IF_SET_IN_BITMAP (syms_to_rename, 0, i, bi)
EXECUTE_IF_SET_IN_BITMAP (SYMS_TO_RENAME (cfun), 0, i, bi)
insert_updated_phi_nodes_for (referenced_var (i), dfs, blocks_to_update,
update_flags);
@ -3283,7 +3263,7 @@ update_ssa (unsigned update_flags)
EXECUTE_IF_SET_IN_SBITMAP (old_ssa_names, 0, i, sbi)
set_current_def (ssa_name (i), NULL_TREE);
EXECUTE_IF_SET_IN_BITMAP (syms_to_rename, 0, i, bi)
EXECUTE_IF_SET_IN_BITMAP (SYMS_TO_RENAME (cfun), 0, i, bi)
set_current_def (referenced_var (i), NULL_TREE);
/* Now start the renaming process at START_BB. */
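
To summarize the new tree-into-ssa interface in a minimal sketch: marking a symbol only records it in the per-function SYMS_TO_RENAME set, and both the query and the updater are explicit about the function they operate on. The helper name below is hypothetical:

static void
rename_sym_and_update (tree sym)
{
  /* Now just records DECL_UID (sym) in SYMS_TO_RENAME (cfun).  */
  mark_sym_for_renaming (sym);

  /* need_ssa_update_p takes the function to query explicitly.  */
  if (need_ssa_update_p (cfun))
    update_ssa (TODO_update_ssa);
}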


@ -77,9 +77,6 @@ static bitmap remaining_stmts;
predecessor a node that writes to memory. */
static bitmap upstream_mem_writes;
/* TODOs we need to run after the pass. */
static unsigned int todo;
/* Update the PHI nodes of NEW_LOOP. NEW_LOOP is a duplicate of
ORIG_LOOP. */
@ -241,14 +238,13 @@ static bool
generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
gimple_stmt_iterator bsi)
{
tree t, addr_base;
tree addr_base;
tree nb_bytes = NULL;
bool res = false;
gimple_seq stmts = NULL, stmt_list = NULL;
gimple fn_call;
tree mem, fndecl, fntype, fn;
gimple_stmt_iterator i;
ssa_op_iter iter;
struct data_reference *dr = XCNEW (struct data_reference);
DR_STMT (dr) = stmt;
@ -303,29 +299,6 @@ generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
{
gimple s = gsi_stmt (i);
update_stmt_if_modified (s);
FOR_EACH_SSA_TREE_OPERAND (t, s, iter, SSA_OP_VIRTUAL_DEFS)
{
if (TREE_CODE (t) == SSA_NAME)
t = SSA_NAME_VAR (t);
mark_sym_for_renaming (t);
}
}
/* Mark also the uses of the VDEFS of STMT to be renamed. */
FOR_EACH_SSA_TREE_OPERAND (t, stmt, iter, SSA_OP_VIRTUAL_DEFS)
{
if (TREE_CODE (t) == SSA_NAME)
{
gimple s;
imm_use_iterator imm_iter;
FOR_EACH_IMM_USE_STMT (s, imm_iter, t)
update_stmt (s);
t = SSA_NAME_VAR (t);
}
mark_sym_for_renaming (t);
}
gsi_insert_seq_after (&bsi, stmt_list, GSI_CONTINUE_LINKING);
@ -334,8 +307,6 @@ generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "generated memset zero\n");
todo |= TODO_rebuild_alias;
end:
free_data_ref (dr);
return res;
@ -606,7 +577,6 @@ static void
rdg_flag_uses (struct graph *rdg, int u, bitmap partition, bitmap loops,
bitmap processed, bool *part_has_writes)
{
ssa_op_iter iter;
use_operand_p use_p;
struct vertex *x = &(rdg->vertices[u]);
gimple stmt = RDGV_STMT (x);
@ -626,7 +596,7 @@ rdg_flag_uses (struct graph *rdg, int u, bitmap partition, bitmap loops,
if (gimple_code (stmt) != GIMPLE_PHI)
{
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_VIRTUAL_USES)
if ((use_p = gimple_vuse_op (stmt)) != NULL_USE_OPERAND_P)
{
tree use = USE_FROM_PTR (use_p);
@ -1211,8 +1181,6 @@ tree_loop_distribution (void)
loop_iterator li;
int nb_generated_loops = 0;
todo = 0;
FOR_EACH_LOOP (li, loop, 0)
{
VEC (gimple, heap) *work_list = VEC_alloc (gimple, heap, 3);
@ -1244,7 +1212,7 @@ tree_loop_distribution (void)
VEC_free (gimple, heap, work_list);
}
return todo;
return 0;
}
static bool


@ -122,14 +122,10 @@ create_temp (tree t)
DECL_GIMPLE_REG_P (tmp) = DECL_GIMPLE_REG_P (t);
add_referenced_var (tmp);
/* add_referenced_var will create the annotation and set up some
of the flags in the annotation. However, some flags we need to
inherit from our original variable. */
set_symbol_mem_tag (tmp, symbol_mem_tag (t));
if (is_call_clobbered (t))
mark_call_clobbered (tmp, var_ann (t)->escape_mask);
if (bitmap_bit_p (gimple_call_used_vars (cfun), DECL_UID (t)))
bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (tmp));
/* We should never have copied variables in non-automatic storage
or variables that have their address taken. So it is pointless
to try to copy call-clobber state here. */
gcc_assert (!may_be_aliased (t) && !is_global_var (t));
return tmp;
}


@ -286,7 +286,10 @@ struct dump_file_info
#define TODO_mark_first_instance (1 << 19)
/* Rebuild aliasing info. */
#define TODO_rebuild_alias (1 << 20)
#define TODO_rebuild_alias (1 << 20)
/* Rebuild the addressable-vars bitmap and do register promotion. */
#define TODO_update_address_taken (1 << 21)
#define TODO_update_ssa_any \
(TODO_update_ssa \
@ -374,7 +377,6 @@ extern struct gimple_opt_pass pass_forwprop;
extern struct gimple_opt_pass pass_phiprop;
extern struct gimple_opt_pass pass_tree_ifcombine;
extern struct gimple_opt_pass pass_dse;
extern struct gimple_opt_pass pass_simple_dse;
extern struct gimple_opt_pass pass_nrv;
extern struct gimple_opt_pass pass_mark_used_blocks;
extern struct gimple_opt_pass pass_rename_ssa_copies;
@ -391,7 +393,6 @@ extern struct gimple_opt_pass pass_reassoc;
extern struct gimple_opt_pass pass_rebuild_cgraph_edges;
extern struct gimple_opt_pass pass_remove_cgraph_callee_edges;
extern struct gimple_opt_pass pass_build_cgraph_edges;
extern struct gimple_opt_pass pass_reset_cc_flags;
extern struct gimple_opt_pass pass_local_pure_const;
/* IPA Passes */


@ -1423,7 +1423,6 @@ get_init_expr (chain_p chain, unsigned index)
void
mark_virtual_ops_for_renaming (gimple stmt)
{
ssa_op_iter iter;
tree var;
if (gimple_code (stmt) == GIMPLE_PHI)
@ -1439,24 +1438,8 @@ mark_virtual_ops_for_renaming (gimple stmt)
}
update_stmt (stmt);
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_VIRTUALS)
{
if (TREE_CODE (var) == SSA_NAME)
var = SSA_NAME_VAR (var);
mark_sym_for_renaming (var);
}
}
/* Calls mark_virtual_ops_for_renaming for all members of LIST. */
static void
mark_virtual_ops_for_renaming_list (gimple_seq list)
{
gimple_stmt_iterator gsi;
for (gsi = gsi_start (list); !gsi_end_p (gsi); gsi_next (&gsi))
mark_virtual_ops_for_renaming (gsi_stmt (gsi));
if (gimple_vuse (stmt))
mark_sym_for_renaming (gimple_vop (cfun));
}
/* Returns a new temporary variable used for the I-th variable carrying
@ -1525,10 +1508,7 @@ initialize_root_vars (struct loop *loop, chain_p chain, bitmap tmp_vars)
init = force_gimple_operand (init, &stmts, true, NULL_TREE);
if (stmts)
{
mark_virtual_ops_for_renaming_list (stmts);
gsi_insert_seq_on_edge_immediate (entry, stmts);
}
gsi_insert_seq_on_edge_immediate (entry, stmts);
phi = create_phi_node (var, loop->header);
SSA_NAME_DEF_STMT (var) = phi;
@ -1589,10 +1569,7 @@ initialize_root_vars_lm (struct loop *loop, dref root, bool written,
init = force_gimple_operand (init, &stmts, written, NULL_TREE);
if (stmts)
{
mark_virtual_ops_for_renaming_list (stmts);
gsi_insert_seq_on_edge_immediate (entry, stmts);
}
gsi_insert_seq_on_edge_immediate (entry, stmts);
if (written)
{
@ -2421,31 +2398,6 @@ try_combine_chains (VEC (chain_p, heap) **chains)
}
}
/* Sets alias information based on data reference DR for REF,
if necessary. */
static void
set_alias_info (tree ref, struct data_reference *dr)
{
tree var;
tree tag = DR_SYMBOL_TAG (dr);
gcc_assert (tag != NULL_TREE);
ref = get_base_address (ref);
if (!ref || !INDIRECT_REF_P (ref))
return;
var = SSA_NAME_VAR (TREE_OPERAND (ref, 0));
if (var_ann (var)->symbol_mem_tag)
return;
if (!MTAG_P (tag))
new_type_alias (var, tag, ref);
else
var_ann (var)->symbol_mem_tag = tag;
}
/* Prepare initializers for CHAIN in LOOP. Returns false if this is
impossible because one of these initializers may trap, true otherwise. */
@ -2491,11 +2443,7 @@ prepare_initializers_chain (struct loop *loop, chain_p chain)
init = force_gimple_operand (init, &stmts, false, NULL_TREE);
if (stmts)
{
mark_virtual_ops_for_renaming_list (stmts);
gsi_insert_seq_on_edge_immediate (entry, stmts);
}
set_alias_info (init, dr);
gsi_insert_seq_on_edge_immediate (entry, stmts);
VEC_replace (tree, chain->inits, i, init);
}
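
Since every memory statement now shares one virtual operand, the removed per-operand loops collapse to marking the single VOP symbol, as the rewritten mark_virtual_ops_for_renaming above shows. An illustrative sketch for a whole statement sequence (hypothetical helper; the patch itself simply drops the list variant because marking once suffices):

static void
mark_seq_vops_for_renaming (gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    if (gimple_vuse (gsi_stmt (gsi)))
      {
        /* One shared symbol stands for all memory statements.  */
        mark_sym_for_renaming (gimple_vop (cfun));
        return;
      }
}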


@ -881,13 +881,10 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
}
break;
case SYMBOL_MEMORY_TAG:
case NAME_MEMORY_TAG:
case VAR_DECL:
case PARM_DECL:
case FIELD_DECL:
case NAMESPACE_DECL:
case MEMORY_PARTITION_TAG:
dump_decl_name (buffer, node, flags);
break;


@ -78,9 +78,6 @@ along with GCC; see the file COPYING3. If not see
/* True if this is the "early" pass, before inlining. */
static bool early_sra;
/* The set of todo flags to return from tree_sra. */
static unsigned int todoflags;
/* The set of aggregate variables that are candidates for scalarization. */
static bitmap sra_candidates;
@ -210,7 +207,6 @@ extern void debug_sra_elt_name (struct sra_elt *);
static tree generate_element_ref (struct sra_elt *);
static gimple_seq sra_build_assignment (tree dst, tree src);
static void mark_all_v_defs_seq (gimple_seq);
static void mark_all_v_defs_stmt (gimple);
/* Return true if DECL is an SRA candidate. */
@ -1057,11 +1053,10 @@ sra_walk_function (const struct sra_walk_fns *fns)
ni = si;
gsi_next (&ni);
/* If the statement has no virtual operands, then it doesn't
/* If the statement does not reference memory, then it doesn't
make any structure references that we care about. */
if (gimple_aliases_computed_p (cfun)
&& ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
continue;
if (!gimple_references_memory_p (stmt))
continue;
switch (gimple_code (stmt))
{
@ -2008,27 +2003,6 @@ decide_instantiations (void)
/* Phase Four: Update the function to match the replacements created. */
/* Mark all the variables in VDEF/VUSE operators for STMT for
renaming. This becomes necessary when we modify all of a
non-scalar. */
static void
mark_all_v_defs_stmt (gimple stmt)
{
tree sym;
ssa_op_iter iter;
update_stmt_if_modified (stmt);
FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
{
if (TREE_CODE (sym) == SSA_NAME)
sym = SSA_NAME_VAR (sym);
mark_sym_for_renaming (sym);
}
}
/* Mark all the variables in virtual operands in all the statements in
LIST for renaming. */
@ -2038,7 +2012,7 @@ mark_all_v_defs_seq (gimple_seq seq)
gimple_stmt_iterator gsi;
for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
mark_all_v_defs_stmt (gsi_stmt (gsi));
update_stmt_if_modified (gsi_stmt (gsi));
}
/* Mark every replacement under ELT with TREE_NO_WARNING. */
@ -2863,6 +2837,7 @@ static void
sra_replace (gimple_stmt_iterator *gsi, gimple_seq seq)
{
sra_insert_before (gsi, seq);
unlink_stmt_vdef (gsi_stmt (*gsi));
gsi_remove (gsi, false);
if (gsi_end_p (*gsi))
*gsi = gsi_last (gsi_seq (*gsi));
@ -3138,7 +3113,7 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, gimple_stmt_iterator *gsi,
replacement = tmp;
}
if (is_output)
mark_all_v_defs_stmt (stmt);
update_stmt_if_modified (stmt);
*expr_p = REPLDUP (replacement);
update_stmt (stmt);
}
@ -3358,7 +3333,7 @@ scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
original block copy statement. */
stmt = gsi_stmt (*gsi);
mark_all_v_defs_stmt (stmt);
update_stmt_if_modified (stmt);
seq = NULL;
generate_element_copy (lhs_elt, rhs_elt, &seq);
@ -3425,7 +3400,7 @@ scalarize_init (struct sra_elt *lhs_elt, tree rhs, gimple_stmt_iterator *gsi)
/* The LHS is fully instantiated. The list of initializations
replaces the original structure assignment. */
gcc_assert (seq);
mark_all_v_defs_stmt (gsi_stmt (*gsi));
update_stmt_if_modified (gsi_stmt (*gsi));
mark_all_v_defs_seq (seq);
sra_replace (gsi, seq);
}
@ -3476,7 +3451,7 @@ scalarize_ldst (struct sra_elt *elt, tree other,
gimple_seq seq = NULL;
gimple stmt = gsi_stmt (*gsi);
mark_all_v_defs_stmt (stmt);
update_stmt_if_modified (stmt);
generate_copy_inout (elt, is_output, other, &seq);
gcc_assert (seq);
mark_all_v_defs_seq (seq);
@ -3637,7 +3612,6 @@ static unsigned int
tree_sra (void)
{
/* Initialize local variables. */
todoflags = 0;
gcc_obstack_init (&sra_obstack);
sra_candidates = BITMAP_ALLOC (NULL);
needs_copy_in = BITMAP_ALLOC (NULL);
@ -3650,8 +3624,6 @@ tree_sra (void)
scan_function ();
decide_instantiations ();
scalarize_function ();
if (!bitmap_empty_p (sra_candidates))
todoflags |= TODO_rebuild_alias;
}
/* Free allocated memory. */
@ -3662,7 +3634,7 @@ tree_sra (void)
BITMAP_FREE (sra_type_decomp_cache);
BITMAP_FREE (sra_type_inst_cache);
obstack_free (&sra_obstack, NULL);
return todoflags;
return 0;
}
static unsigned int
@ -3674,7 +3646,7 @@ tree_sra_early (void)
ret = tree_sra ();
early_sra = false;
return ret & ~TODO_rebuild_alias;
return ret;
}
static bool
@ -3719,7 +3691,7 @@ struct gimple_opt_pass pass_sra =
PROP_cfg | PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_update_address_taken, /* todo_flags_start */
TODO_dump_func
| TODO_update_ssa
| TODO_ggc_collect

View File

@ -333,9 +333,9 @@ create_mem_ref_raw (tree type, struct mem_address *addr)
if (addr->offset && integer_zerop (addr->offset))
addr->offset = NULL_TREE;
return build7 (TARGET_MEM_REF, type,
return build6 (TARGET_MEM_REF, type,
addr->symbol, addr->base, addr->index,
addr->step, addr->offset, NULL, NULL);
addr->step, addr->offset, NULL);
}
/* Returns true if OBJ is an object whose address is a link time constant. */
@ -709,9 +709,6 @@ get_address_description (tree op, struct mem_address *addr)
void
copy_mem_ref_info (tree to, tree from)
{
/* Copy the annotation, to preserve the aliasing information. */
TMR_TAG (to) = TMR_TAG (from);
/* And the info about the original reference. */
TMR_ORIGINAL (to) = TMR_ORIGINAL (from);
}

[diff for one file suppressed: too large to display]

gcc/tree-ssa-alias.h (new file, 106 lines)

@ -0,0 +1,106 @@
/* Tree based alias analysis and alias oracle.
Copyright (C) 2008 Free Software Foundation, Inc.
Contributed by Richard Guenther <rguenther@suse.de>
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
#ifndef TREE_SSA_ALIAS_H
#define TREE_SSA_ALIAS_H
#include "coretypes.h"
/* The reasons a variable may escape a function. */
enum escape_type
{
NO_ESCAPE = 0, /* Doesn't escape. */
ESCAPE_STORED_IN_GLOBAL = 1 << 0,
ESCAPE_TO_ASM = 1 << 1, /* Passed by address to an assembly
statement. */
ESCAPE_TO_CALL = 1 << 2, /* Escapes to a function call. */
ESCAPE_BAD_CAST = 1 << 3, /* Cast from pointer to integer */
ESCAPE_TO_RETURN = 1 << 4, /* Returned from function. */
ESCAPE_TO_PURE_CONST = 1 << 5, /* Escapes to a pure or constant
function call. */
ESCAPE_IS_GLOBAL = 1 << 6, /* Is a global variable. */
ESCAPE_IS_PARM = 1 << 7, /* Is an incoming function argument. */
ESCAPE_UNKNOWN = 1 << 8 /* We believe it escapes for
some reason not enumerated
above. */
};
/* The points-to solution.
The points-to solution is a union of pt_vars and the abstract
sets specified by the flags. */
struct pt_solution GTY(())
{
/* Nonzero if points-to analysis couldn't determine where this pointer
is pointing to. */
unsigned int anything : 1;
/* Nonzero if the points-to set includes any global memory. Note that
even if this is zero pt_vars can still include global variables. */
unsigned int nonlocal : 1;
/* Nonzero if the points-to set includes any escaped local variable. */
unsigned int escaped : 1;
/* Nonzero if the points-to set includes 'nothing', i.e. the
points-to set includes memory at address NULL. */
unsigned int null : 1;
/* Nonzero if the pt_vars bitmap includes a global variable. */
unsigned int vars_contains_global : 1;
/* Set of variables that this pointer may point to. */
bitmap vars;
};
/* In tree-ssa-alias.c */
extern enum escape_type is_escape_site (gimple);
extern bool ptr_deref_may_alias_global_p (tree);
extern bool refs_may_alias_p (tree, tree);
extern bool ref_maybe_used_by_stmt_p (gimple, tree);
extern bool stmt_may_clobber_ref_p (gimple, tree);
extern void *walk_non_aliased_vuses (tree, tree,
void *(*)(tree, tree, void *), void *);
extern unsigned int walk_aliased_vdefs (tree, tree,
bool (*)(tree, tree, void *), void *,
bitmap *);
extern struct ptr_info_def *get_ptr_info (tree);
extern void dump_alias_info (FILE *);
extern void debug_alias_info (void);
extern void dump_points_to_info_for (FILE *, tree);
extern void debug_points_to_info_for (tree);
extern void dump_alias_stats (FILE *);
/* In tree-ssa-structalias.c */
extern unsigned int compute_may_aliases (void);
extern void delete_alias_heapvars (void);
extern bool pt_solution_includes_global (struct pt_solution *);
extern bool pt_solution_includes (struct pt_solution *, const_tree);
extern bool pt_solutions_intersect (struct pt_solution *, struct pt_solution *);
extern void pt_solution_reset (struct pt_solution *);
extern void dump_pta_stats (FILE *);
#endif /* TREE_SSA_ALIAS_H */
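
A hypothetical client of this header, to illustrate the intended query style of the new oracle (the function below is not part of the patch):

/* Return true if STMT may touch the memory reference REF,
   either as a load or as a store.  */
static bool
stmt_touches_ref_p (gimple stmt, tree ref)
{
  if (ref_maybe_used_by_stmt_p (stmt, ref))    /* may read from REF */
    return true;
  return stmt_may_clobber_ref_p (stmt, ref);   /* may write to REF */
}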


@ -274,8 +274,7 @@ tree
get_symbol_constant_value (tree sym)
{
if (TREE_STATIC (sym)
&& TREE_READONLY (sym)
&& !MTAG_P (sym))
&& TREE_READONLY (sym))
{
tree val = DECL_INITIAL (sym);
if (val)
@ -528,8 +527,10 @@ likely_value (gimple stmt)
has_constant_operand = true;
}
/* There may be constants in regular rhs operands. */
for (i = is_gimple_call (stmt) + gimple_has_lhs (stmt);
/* There may be constants in regular rhs operands. For calls we
have to ignore lhs, fndecl and static chain, otherwise only
the lhs. */
for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
i < gimple_num_ops (stmt); ++i)
{
tree op = gimple_op (stmt, i);
@ -601,7 +602,7 @@ surely_varying_stmt_p (gimple stmt)
}
/* Any other store operation is not interesting. */
else if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
else if (gimple_vdef (stmt))
return true;
/* Anything other than assignments and conditional jumps are not
@ -3172,11 +3173,16 @@ gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
}
if (lhs == NULL_TREE)
new_stmt = gimple_build_nop ();
{
new_stmt = gimple_build_nop ();
unlink_stmt_vdef (stmt);
release_defs (stmt);
}
else
{
new_stmt = gimple_build_assign (lhs, tmp);
copy_virtual_operands (new_stmt, stmt);
gimple_set_vuse (new_stmt, gimple_vuse (stmt));
gimple_set_vdef (new_stmt, gimple_vdef (stmt));
move_ssa_defining_stmt_for_defs (new_stmt, stmt);
}
@ -3264,10 +3270,7 @@ execute_fold_all_builtins (void)
push_stmt_changes (gsi_stmt_ptr (&i));
if (!update_call_from_tree (&i, result))
{
gimplify_and_update_call_from_tree (&i, result);
todoflags |= TODO_rebuild_alias;
}
gimplify_and_update_call_from_tree (&i, result);
stmt = gsi_stmt (i);
pop_stmt_changes (gsi_stmt_ptr (&i));


@ -1114,12 +1114,9 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
bitmap_set_bit (used_in_real_ops, DECL_UID (SSA_NAME_VAR (var)));
/* Validate that virtual ops don't get used in funny ways. */
FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_VIRTUALS)
{
bitmap_set_bit (used_in_virtual_ops,
DECL_UID (SSA_NAME_VAR (var)));
}
if (gimple_vuse (stmt))
bitmap_set_bit (used_in_virtual_ops,
DECL_UID (SSA_NAME_VAR (gimple_vuse (stmt))));
#endif /* ENABLE_CHECKING */
}
}


@ -72,24 +72,6 @@ may_propagate_copy (tree dest, tree orig)
if (TREE_CODE (dest) == SSA_NAME
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (dest))
return false;
/* For memory partitions, copies are OK as long as the memory symbol
belongs to the partition. */
if (TREE_CODE (dest) == SSA_NAME
&& TREE_CODE (SSA_NAME_VAR (dest)) == MEMORY_PARTITION_TAG)
return (TREE_CODE (orig) == SSA_NAME
&& !is_gimple_reg (orig)
&& (SSA_NAME_VAR (dest) == SSA_NAME_VAR (orig)
|| bitmap_bit_p (MPT_SYMBOLS (SSA_NAME_VAR (dest)),
DECL_UID (SSA_NAME_VAR (orig)))));
if (TREE_CODE (orig) == SSA_NAME
&& TREE_CODE (SSA_NAME_VAR (orig)) == MEMORY_PARTITION_TAG)
return (TREE_CODE (dest) == SSA_NAME
&& !is_gimple_reg (dest)
&& (SSA_NAME_VAR (dest) == SSA_NAME_VAR (orig)
|| bitmap_bit_p (MPT_SYMBOLS (SSA_NAME_VAR (orig)),
DECL_UID (SSA_NAME_VAR (dest)))));
/* Do not copy between types for which we *do* need a conversion. */
if (!useless_type_conversion_p (type_d, type_o))
@ -136,48 +118,21 @@ may_propagate_copy (tree dest, tree orig)
&& POINTER_TYPE_P (type_d)
&& POINTER_TYPE_P (type_o))
{
tree mt_dest = symbol_mem_tag (SSA_NAME_VAR (dest));
tree mt_orig = symbol_mem_tag (SSA_NAME_VAR (orig));
if (mt_dest && mt_orig && mt_dest != mt_orig)
if (get_alias_set (TREE_TYPE (type_d))
!= get_alias_set (TREE_TYPE (type_o)))
return false;
else if (get_alias_set (TREE_TYPE (type_d)) !=
get_alias_set (TREE_TYPE (type_o)))
else if (DECL_NO_TBAA_P (SSA_NAME_VAR (dest))
!= DECL_NO_TBAA_P (SSA_NAME_VAR (orig)))
return false;
else if (!MTAG_P (SSA_NAME_VAR (dest))
&& !MTAG_P (SSA_NAME_VAR (orig))
&& (DECL_NO_TBAA_P (SSA_NAME_VAR (dest))
!= DECL_NO_TBAA_P (SSA_NAME_VAR (orig))))
return false;
/* Also verify flow-sensitive information is compatible. */
if (SSA_NAME_PTR_INFO (orig) && SSA_NAME_PTR_INFO (dest))
{
struct ptr_info_def *orig_ptr_info = SSA_NAME_PTR_INFO (orig);
struct ptr_info_def *dest_ptr_info = SSA_NAME_PTR_INFO (dest);
if (orig_ptr_info->name_mem_tag
&& dest_ptr_info->name_mem_tag
&& orig_ptr_info->pt_vars
&& dest_ptr_info->pt_vars
&& !bitmap_intersect_p (dest_ptr_info->pt_vars,
orig_ptr_info->pt_vars))
return false;
}
}
/* If the destination is a SSA_NAME for a virtual operand, then we have
some special cases to handle. */
/* Propagating virtual operands is always ok. */
if (TREE_CODE (dest) == SSA_NAME && !is_gimple_reg (dest))
{
/* If both operands are SSA_NAMEs referring to virtual operands, then
we can always propagate. */
if (TREE_CODE (orig) == SSA_NAME
&& !is_gimple_reg (orig))
return true;
/* But only between virtual operands. */
gcc_assert (TREE_CODE (orig) == SSA_NAME && !is_gimple_reg (orig));
/* We have a "copy" from something like a constant into a virtual
operand. Reject these. */
return false;
return true;
}
/* Anything else is OK. */
@ -211,8 +166,7 @@ may_propagate_copy_into_stmt (gimple dest, tree orig)
is much simpler. */
if (TREE_CODE (orig) == SSA_NAME
&& (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
|| TREE_CODE (SSA_NAME_VAR (orig)) == MEMORY_PARTITION_TAG))
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig))
return false;
if (is_gimple_assign (dest))
@ -252,29 +206,13 @@ may_propagate_copy_into_asm (tree dest)
void
merge_alias_info (tree orig_name, tree new_name)
{
tree new_sym = SSA_NAME_VAR (new_name);
tree orig_sym = SSA_NAME_VAR (orig_name);
var_ann_t new_ann = var_ann (new_sym);
var_ann_t orig_ann = var_ann (orig_sym);
/* No merging necessary when memory partitions are involved. */
if (factoring_name_p (new_name))
{
gcc_assert (!is_gimple_reg (orig_sym));
return;
}
else if (factoring_name_p (orig_name))
{
gcc_assert (!is_gimple_reg (new_sym));
return;
}
gcc_assert (POINTER_TYPE_P (TREE_TYPE (orig_name))
&& POINTER_TYPE_P (TREE_TYPE (new_name)));
#if defined ENABLE_CHECKING
gcc_assert (useless_type_conversion_p (TREE_TYPE (orig_name),
TREE_TYPE (new_name)));
TREE_TYPE (new_name)));
#endif
/* Check that flow-sensitive information is compatible. Notice that
we may not merge flow-sensitive information here. This function
@ -290,58 +228,12 @@ merge_alias_info (tree orig_name, tree new_name)
same in every block dominated by the predicate.
Since we cannot distinguish one case from another in this
function, we can only make sure that if P_i and Q_j have
flow-sensitive information, they should be compatible.
function, we cannot merge flow-sensitive information by
intersecting. Instead the only thing we can do is to _not_
merge flow-sensitive information.
As callers of merge_alias_info are supposed to call may_propagate_copy
first, the following check is redundant. Thus, only do it if checking
is enabled. */
if (SSA_NAME_PTR_INFO (orig_name) && SSA_NAME_PTR_INFO (new_name))
{
struct ptr_info_def *orig_ptr_info = SSA_NAME_PTR_INFO (orig_name);
struct ptr_info_def *new_ptr_info = SSA_NAME_PTR_INFO (new_name);
/* Note that pointer NEW and ORIG may actually have different
pointed-to variables (e.g., PR 18291 represented in
testsuite/gcc.c-torture/compile/pr18291.c). However, since
NEW is being copy-propagated into ORIG, it must always be
true that the pointed-to set for pointer NEW is the same, or
a subset, of the pointed-to set for pointer ORIG. If this
isn't the case, we shouldn't have been able to do the
propagation of NEW into ORIG. */
if (orig_ptr_info->name_mem_tag
&& new_ptr_info->name_mem_tag
&& orig_ptr_info->pt_vars
&& new_ptr_info->pt_vars)
gcc_assert (bitmap_intersect_p (new_ptr_info->pt_vars,
orig_ptr_info->pt_vars));
}
#endif
/* Synchronize the symbol tags. If both pointers had a tag and they
are different, then something has gone wrong. Symbol tags can
always be merged because they are flow insensitive, all the SSA
names of the same base DECL share the same symbol tag. */
if (new_ann->symbol_mem_tag == NULL_TREE)
new_ann->symbol_mem_tag = orig_ann->symbol_mem_tag;
else if (orig_ann->symbol_mem_tag == NULL_TREE)
orig_ann->symbol_mem_tag = new_ann->symbol_mem_tag;
else
gcc_assert (new_ann->symbol_mem_tag == orig_ann->symbol_mem_tag);
/* Copy flow-sensitive alias information in case that NEW_NAME
didn't get a NMT but was set to pt_anything for optimization
purposes. In case ORIG_NAME has a NMT we can safely use its
flow-sensitive alias information as a conservative estimate. */
if (SSA_NAME_PTR_INFO (orig_name)
&& SSA_NAME_PTR_INFO (orig_name)->name_mem_tag
&& (!SSA_NAME_PTR_INFO (new_name)
|| !SSA_NAME_PTR_INFO (new_name)->name_mem_tag))
{
struct ptr_info_def *orig_ptr_info = SSA_NAME_PTR_INFO (orig_name);
struct ptr_info_def *new_ptr_info = get_ptr_info (new_name);
memcpy (new_ptr_info, orig_ptr_info, sizeof (struct ptr_info_def));
}
??? At some point we should enhance this machinery to distinguish
both cases in the caller. */
}
@ -464,8 +356,7 @@ propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
tree expr = NULL_TREE;
propagate_tree_value (&expr, val);
new_stmt = gimple_build_assign (gimple_call_lhs (stmt), expr);
copy_virtual_operands (new_stmt, stmt);
new_stmt = gimple_build_assign (gimple_call_lhs (stmt), expr);
move_ssa_defining_stmt_for_defs (new_stmt, stmt);
gsi_replace (gsi, new_stmt, false);
}
@ -513,7 +404,7 @@ stmt_may_generate_copy (gimple stmt)
return false;
/* Statements with loads and/or stores will never generate a useful copy. */
if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
if (gimple_vuse (stmt))
return false;
/* Otherwise, the only statements that generate useful copies are
@ -864,8 +755,10 @@ copy_prop_visit_phi_node (gimple phi)
Otherwise, this may move loop variant variables outside of
their loops and prevent coalescing opportunities. If the
value was loop invariant, it will be hoisted by LICM and
exposed for copy propagation. */
if (loop_depth_of_name (arg) > loop_depth_of_name (lhs))
exposed for copy propagation. Not a problem for virtual
operands though. */
if (is_gimple_reg (lhs)
&& loop_depth_of_name (arg) > loop_depth_of_name (lhs))
{
phi_val.value = lhs;
break;


@ -115,7 +115,6 @@ copy_rename_partition_coalesce (var_map map, tree var1, tree var2, FILE *debug)
int p1, p2, p3;
tree root1, root2;
tree rep1, rep2;
var_ann_t ann1, ann2, ann3;
bool ign1, ign2, abnorm;
gcc_assert (TREE_CODE (var1) == SSA_NAME);
@ -144,9 +143,6 @@ copy_rename_partition_coalesce (var_map map, tree var1, tree var2, FILE *debug)
root1 = SSA_NAME_VAR (rep1);
root2 = SSA_NAME_VAR (rep2);
ann1 = var_ann (root1);
ann2 = var_ann (root2);
if (p1 == p2)
{
if (debug)
@ -207,16 +203,6 @@ copy_rename_partition_coalesce (var_map map, tree var1, tree var2, FILE *debug)
}
}
/* Don't coalesce if there are two different memory tags. */
if (ann1->symbol_mem_tag
&& ann2->symbol_mem_tag
&& ann1->symbol_mem_tag != ann2->symbol_mem_tag)
{
if (debug)
fprintf (debug, " : 2 memory tags. No coalesce.\n");
return false;
}
/* If both values have default defs, we can't coalesce. If only one has a
tag, make sure that variable is the new root partition. */
if (gimple_default_def (cfun, root1))
@ -252,8 +238,7 @@ copy_rename_partition_coalesce (var_map map, tree var1, tree var2, FILE *debug)
&& POINTER_TYPE_P (TREE_TYPE (root2))
&& ((get_alias_set (TREE_TYPE (TREE_TYPE (root1)))
!= get_alias_set (TREE_TYPE (TREE_TYPE (root2))))
|| ((DECL_P (root1) && !MTAG_P (root1))
&& (DECL_P (root2) && !MTAG_P (root2))
|| (DECL_P (root1) && DECL_P (root2)
&& DECL_NO_TBAA_P (root1) != DECL_NO_TBAA_P (root2))))
{
if (debug)
@ -272,13 +257,6 @@ copy_rename_partition_coalesce (var_map map, tree var1, tree var2, FILE *debug)
else if (!ign1)
replace_ssa_name_symbol (partition_to_var (map, p3), root1);
/* Update the various flag widgitry of the current base representative. */
ann3 = var_ann (SSA_NAME_VAR (partition_to_var (map, p3)));
if (ann1->symbol_mem_tag)
ann3->symbol_mem_tag = ann1->symbol_mem_tag;
else
ann3->symbol_mem_tag = ann2->symbol_mem_tag;
if (debug)
{
fprintf (debug, " --> P%d ", p3);


@ -233,7 +233,12 @@ mark_operand_necessary (tree op)
ver = SSA_NAME_VERSION (op);
if (TEST_BIT (processed, ver))
return;
{
stmt = SSA_NAME_DEF_STMT (op);
gcc_assert (gimple_nop_p (stmt)
|| gimple_plf (stmt, STMT_NECESSARY));
return;
}
SET_BIT (processed, ver);
stmt = SSA_NAME_DEF_STMT (op);
@ -242,6 +247,14 @@ mark_operand_necessary (tree op)
if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
return;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "marking necessary through ");
print_generic_expr (dump_file, op, 0);
fprintf (dump_file, " stmt ");
print_gimple_stmt (dump_file, stmt, 0, 0);
}
gimple_set_plf (stmt, STMT_NECESSARY, true);
VEC_safe_push (gimple, heap, worklist, stmt);
}
@ -429,6 +442,133 @@ find_obviously_necessary_stmts (struct edge_list *el)
}
/* Return true if REF is based on an aliased base, otherwise false. */
static bool
ref_may_be_aliased (tree ref)
{
while (handled_component_p (ref))
ref = TREE_OPERAND (ref, 0);
return !(DECL_P (ref)
&& !may_be_aliased (ref));
}
struct ref_data {
tree base;
HOST_WIDE_INT size;
HOST_WIDE_INT offset;
HOST_WIDE_INT max_size;
};
static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static bool chain_ovfl = false;
/* Worker for the walker that marks reaching definitions of REF,
which is based on a non-aliased decl, necessary. It returns
true whenever the defining statement of the current VDEF is
a kill for REF, as no dominating may-defs are necessary for REF
anymore. DATA points to cached get_ref_base_and_extent data for REF. */
static bool
mark_aliased_reaching_defs_necessary_1 (tree ref, tree vdef, void *data)
{
gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
struct ref_data *refd = (struct ref_data *)data;
/* All stmts we visit are necessary. */
mark_operand_necessary (vdef);
/* If the stmt lhs kills ref, then we can stop walking. */
if (gimple_has_lhs (def_stmt)
&& TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME)
{
tree base, lhs = gimple_get_lhs (def_stmt);
HOST_WIDE_INT size, offset, max_size;
base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
/* We can get MEM[symbol: sZ, index: D.8862_1] here,
so base == refd->base does not always hold. */
if (base == refd->base)
{
/* For a must-alias check we need to be able to constrain
the accesses properly. */
if (size != -1 && size == max_size
&& refd->max_size != -1)
{
if (offset <= refd->offset
&& offset + size >= refd->offset + refd->max_size)
return true;
}
/* Or they need to be exactly the same. */
else if (operand_equal_p (ref, lhs, 0))
return true;
}
}
/* Otherwise keep walking. */
return false;
}
static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
struct ref_data refd;
unsigned int chain;
gcc_assert (!chain_ovfl);
refd.base = get_ref_base_and_extent (ref, &refd.offset, &refd.size,
&refd.max_size);
chain = walk_aliased_vdefs (ref, gimple_vuse (stmt),
mark_aliased_reaching_defs_necessary_1,
&refd, NULL);
if (chain > longest_chain)
longest_chain = chain;
total_chain += chain;
}
/* Worker for the walker that marks reaching definitions of REF, which
is not based on a non-aliased decl. For simplicity we need to end
up marking all may-defs necessary that are not based on a non-aliased
decl. The only job of this walker is to skip may-defs based on
a non-aliased decl. */
static bool
mark_all_reaching_defs_necessary_1 (tree ref ATTRIBUTE_UNUSED,
tree vdef, void *data ATTRIBUTE_UNUSED)
{
gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
/* We have to skip already visited (and thus necessary) statements
to make the chaining work after we dropped back to simple mode. */
if (chain_ovfl
&& TEST_BIT (processed, SSA_NAME_VERSION (vdef)))
{
gcc_assert (gimple_nop_p (def_stmt)
|| gimple_plf (def_stmt, STMT_NECESSARY));
return false;
}
/* We want to skip stores to non-aliased variables. */
if (!chain_ovfl
&& gimple_assign_single_p (def_stmt))
{
tree lhs = gimple_assign_lhs (def_stmt);
if (!ref_may_be_aliased (lhs))
return false;
}
/* But can stop after the first necessary statement. */
mark_operand_necessary (vdef);
return true;
}
static void
mark_all_reaching_defs_necessary (gimple stmt)
{
walk_aliased_vdefs (NULL, gimple_vuse (stmt),
mark_all_reaching_defs_necessary_1, NULL, &visited);
}
/* Propagate necessity using the operands of necessary statements.
Process the uses on each statement in the worklist, and add all
feeding statements which contribute to the calculation of this
@ -471,7 +611,10 @@ propagate_necessity (struct edge_list *el)
}
}
if (gimple_code (stmt) == GIMPLE_PHI)
if (gimple_code (stmt) == GIMPLE_PHI
/* We do not process virtual PHI nodes nor do we track their
necessity. */
&& is_gimple_reg (gimple_phi_result (stmt)))
{
/* PHI nodes are somewhat special in that each PHI alternative has
data and control dependencies. All the statements feeding the
@ -506,16 +649,121 @@ propagate_necessity (struct edge_list *el)
{
/* Propagate through the operands. Examine all the USE, VUSE and
VDEF operands in this statement. Mark all the statements
which feed this statement's uses as necessary. The
operands of VDEF expressions are also needed as they
represent potential definitions that may reach this
statement (VDEF operands allow us to follow def-def
links). */
which feed this statement's uses as necessary. */
ssa_op_iter iter;
tree use;
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
mark_operand_necessary (use);
use = gimple_vuse (stmt);
if (!use)
continue;
/* If we dropped to simple mode make all immediately
reachable definitions necessary. */
if (chain_ovfl)
{
mark_all_reaching_defs_necessary (stmt);
continue;
}
/* For statements that may load from memory (have a VUSE) we
have to mark all reaching (may-)definitions as necessary.
We partition this task into two cases:
1) explicit loads based on decls that are not aliased
2) implicit loads (like calls) and explicit loads not
based on decls that are not aliased (like indirect
references or loads from globals)
For 1) we mark all reaching may-defs as necessary, stopping
at dominating kills. For 2) we want to mark all dominating
references necessary, but non-aliased ones which we handle
in 1). Instead of doing so for each load we rely on the
worklist to eventually reach all dominating references and
instead just mark the immediately dominating references
as necessary (but skipping non-aliased ones). */
if (is_gimple_call (stmt))
{
unsigned i;
/* Calls implicitly load from memory, their arguments
in addition may explicitly perform memory loads.
This also ensures propagation for case 2 for stores. */
mark_all_reaching_defs_necessary (stmt);
for (i = 0; i < gimple_call_num_args (stmt); ++i)
{
tree arg = gimple_call_arg (stmt, i);
if (TREE_CODE (arg) == SSA_NAME
|| is_gimple_min_invariant (arg))
continue;
if (!ref_may_be_aliased (arg))
mark_aliased_reaching_defs_necessary (stmt, arg);
}
}
else if (gimple_assign_single_p (stmt))
{
tree lhs, rhs;
bool rhs_aliased = false;
/* If this is a load mark things necessary. */
rhs = gimple_assign_rhs1 (stmt);
if (TREE_CODE (rhs) != SSA_NAME
&& !is_gimple_min_invariant (rhs))
{
if (!ref_may_be_aliased (rhs))
mark_aliased_reaching_defs_necessary (stmt, rhs);
else
rhs_aliased = true;
}
/* If this is an aliased store, mark things necessary.
This is where we make sure to propagate for case 2. */
lhs = gimple_assign_lhs (stmt);
if (rhs_aliased
|| (TREE_CODE (lhs) != SSA_NAME
&& ref_may_be_aliased (lhs)))
mark_all_reaching_defs_necessary (stmt);
}
else if (gimple_code (stmt) == GIMPLE_RETURN)
{
tree rhs = gimple_return_retval (stmt);
/* A return statement may perform a load. */
if (TREE_CODE (rhs) != SSA_NAME
&& !is_gimple_min_invariant (rhs))
{
if (!ref_may_be_aliased (rhs))
mark_aliased_reaching_defs_necessary (stmt, rhs);
else
mark_all_reaching_defs_necessary (stmt);
}
}
else if (gimple_code (stmt) == GIMPLE_ASM)
{
unsigned i;
mark_all_reaching_defs_necessary (stmt);
/* Inputs may perform loads. */
for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
{
tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
if (TREE_CODE (op) != SSA_NAME
&& !is_gimple_min_invariant (op)
&& !ref_may_be_aliased (op))
mark_aliased_reaching_defs_necessary (stmt, op);
}
}
else
gcc_unreachable ();
/* If we over-used our alias oracle budget drop to simple
mode. The cost metric allows quadratic behavior up to
a constant maximal chain and after that falls back to
super-linear complexity. */
if (longest_chain > 256
&& total_chain > 256 * longest_chain)
{
chain_ovfl = true;
if (visited)
bitmap_clear (visited);
}
}
}
}
@ -537,6 +785,40 @@ remove_dead_phis (basic_block bb)
stats.total_phis++;
phi = gsi_stmt (gsi);
/* We do not track necessity of virtual PHI nodes. Instead do
very simple dead PHI removal here. */
if (!is_gimple_reg (gimple_phi_result (phi)))
{
unsigned i;
tree vuse;
/* Virtual PHI nodes with a single argument or with identical
arguments can be removed. */
vuse = gimple_phi_arg_def (phi, 0);
for (i = 1; i < gimple_phi_num_args (phi); ++i)
{
if (gimple_phi_arg_def (phi, i) != vuse)
{
vuse = NULL_TREE;
break;
}
}
if (vuse != NULL_TREE)
{
tree vdef = gimple_phi_result (phi);
use_operand_p use_p;
imm_use_iterator iter;
gimple use_stmt;
FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
SET_USE (use_p, vuse);
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
else
gimple_set_plf (phi, STMT_NECESSARY, true);
}
if (!gimple_plf (phi, STMT_NECESSARY))
{
something_changed = true;
@ -549,11 +831,10 @@ remove_dead_phis (basic_block bb)
remove_phi_node (&gsi, true);
stats.removed_phis++;
continue;
}
else
{
gsi_next (&gsi);
}
gsi_next (&gsi);
}
return something_changed;
}
@ -643,7 +924,8 @@ remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
remove_edge (EDGE_SUCC (bb, 1));
}
}
unlink_stmt_vdef (stmt);
gsi_remove (i, true);
release_defs (stmt);
}
@ -665,11 +947,6 @@ eliminate_unnecessary_stmts (void)
fprintf (dump_file, "\nEliminating unnecessary statements:\n");
clear_special_calls ();
FOR_EACH_BB (bb)
{
/* Remove dead PHI nodes. */
something_changed |= remove_dead_phis (bb);
}
FOR_EACH_BB (bb)
{
@ -692,7 +969,6 @@ eliminate_unnecessary_stmts (void)
if (call)
{
tree name;
gimple g;
/* When LHS of var = call (); is dead, simplify it into
call (); saving one operand. */
@ -709,11 +985,8 @@ eliminate_unnecessary_stmts (void)
}
push_stmt_changes (gsi_stmt_ptr (&gsi));
g = gimple_copy (stmt);
gimple_call_set_lhs (g, NULL_TREE);
gsi_replace (&gsi, g, false);
maybe_clean_or_replace_eh_stmt (stmt, g);
mark_symbols_for_renaming (g);
gimple_call_set_lhs (stmt, NULL_TREE);
maybe_clean_or_replace_eh_stmt (stmt, stmt);
pop_stmt_changes (gsi_stmt_ptr (&gsi));
release_ssa_name (name);
}
@ -728,6 +1001,12 @@ eliminate_unnecessary_stmts (void)
}
}
FOR_EACH_BB (bb)
{
/* Remove dead PHI nodes. */
something_changed |= remove_dead_phis (bb);
}
return something_changed;
}
@ -839,7 +1118,11 @@ perform_tree_ssa_dce (bool aggressive)
find_obviously_necessary_stmts (el);
longest_chain = 0;
total_chain = 0;
chain_ovfl = false;
propagate_necessity (el);
BITMAP_FREE (visited);
something_changed |= eliminate_unnecessary_stmts ();
something_changed |= cfg_altered;
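
The DCE rewrite above is the first heavy user of walk_aliased_vdefs. Its callback protocol, reduced to a hypothetical example (compare mark_aliased_reaching_defs_necessary_1; names below are illustrative): the callback receives the reference and the reaching VDEF, returning true stops the walk at that definition, and the walker reports how many definitions it visited; that count is what the budget heuristic above feeds into longest_chain and total_chain.

static bool
record_def (tree ref ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  bitmap_set_bit ((bitmap) data, SSA_NAME_VERSION (vdef));
  return false;  /* keep walking past this definition */
}

static unsigned int
record_reaching_defs (gimple stmt, tree ref, bitmap result)
{
  return walk_aliased_vdefs (ref, gimple_vuse (stmt),
                             record_def, result, NULL);
}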


@ -134,10 +134,6 @@ static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;
expressions are removed from AVAIL_EXPRS. Else we may change the
hash code for an expression and be unable to find/remove it from
AVAIL_EXPRS. */
typedef gimple *gimple_p;
DEF_VEC_P(gimple_p);
DEF_VEC_ALLOC_P(gimple_p,heap);
static VEC(gimple_p,heap) *stmts_to_rescan;
/* Structure for entries in the expression hash table. */
@ -1841,7 +1837,7 @@ eliminate_redundant_computations (gimple_stmt_iterator* gsi)
if (! def
|| TREE_CODE (def) != SSA_NAME
|| SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def)
|| !ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF)
|| gimple_vdef (stmt)
/* Do not record equivalences for increments of ivs. This would create
overlapping live ranges for a very questionable gain. */
|| simple_iv_increment_p (stmt))
@ -2021,7 +2017,7 @@ record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
else
new_stmt = gimple_build_assign (rhs, lhs);
create_ssa_artificial_load_stmt (new_stmt, stmt, true);
gimple_set_vuse (new_stmt, gimple_vdef (stmt));
/* Finally enter the statement into the available expression
table. */
@ -2405,7 +2401,6 @@ avail_expr_hash (const void *p)
gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
tree vuse;
ssa_op_iter iter;
hashval_t val = 0;
val = iterative_hash_hashable_expr (expr, val);
@ -2416,11 +2411,11 @@ avail_expr_hash (const void *p)
if (!stmt)
return val;
/* Add the SSA version numbers of every vuse operand. This is important
/* Add the SSA version number of the vuse operand. This is important
because compound variables like arrays are not renamed in the
operands. Rather, the rename is done on the virtual variable
representing all the elements of the array. */
FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
if ((vuse = gimple_vuse (stmt)))
val = iterative_hash_expr (vuse, val);
return val;
@ -2462,8 +2457,8 @@ avail_expr_eq (const void *p1, const void *p2)
&& types_compatible_p (expr1->type, expr2->type))
{
/* Note that STMT1 and/or STMT2 may be NULL. */
bool ret = compare_ssa_operands_equal (stmt1, stmt2, SSA_OP_VUSE);
return ret;
return ((stmt1 ? gimple_vuse (stmt1) : NULL_TREE)
== (stmt2 ? gimple_vuse (stmt2) : NULL_TREE));
}
return false;
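
The DOM hunks above show the pattern this patch applies throughout: per-statement VUSE/VDEF vectors and their iterators are gone, and memory behavior is queried through two accessors. A minimal sketch of the idiom (helper names hypothetical):

static bool
stmt_may_store_p (gimple stmt)
{
  /* A statement that may write memory carries a VDEF.  */
  return gimple_vdef (stmt) != NULL_TREE;
}

static bool
stmt_uses_memory_p (gimple stmt)
{
  /* Any memory-touching statement, load or store, carries a VUSE.  */
  return gimple_vuse (stmt) != NULL_TREE;
}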


@ -84,13 +84,6 @@ struct dse_block_local_data
bitmap stores;
};
/* Basic blocks of the potentially dead store and the following
store, for memory_address_same. */
struct address_walk_data
{
basic_block store1_bb, store2_bb;
};
static bool gate_dse (void);
static unsigned int tree_ssa_dse (void);
static void dse_initialize_block_local_data (struct dom_walk_data *,
@ -151,218 +144,112 @@ dse_initialize_block_local_data (struct dom_walk_data *walk_data,
}
}
/* Helper function for memory_address_same via walk_tree. Returns
non-NULL if it finds an SSA_NAME which is part of the address,
such that the definition of the SSA_NAME post-dominates the store
we want to delete but not the store that we believe makes it
redundant. This indicates that the address may change between
the two stores. */
static tree
memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
void *data)
{
struct address_walk_data *walk_data = (struct address_walk_data *) data;
tree expr = *expr_p;
gimple def_stmt;
basic_block def_bb;
if (TREE_CODE (expr) != SSA_NAME)
return NULL_TREE;
/* If we've found a default definition, then there's no problem. Both
stores will post-dominate it. And def_bb will be NULL. */
if (SSA_NAME_IS_DEFAULT_DEF (expr))
return NULL_TREE;
def_stmt = SSA_NAME_DEF_STMT (expr);
def_bb = gimple_bb (def_stmt);
/* DEF_STMT must dominate both stores. So if it is in the same
basic block as one, it does not post-dominate that store. */
if (walk_data->store1_bb != def_bb
&& dominated_by_p (CDI_POST_DOMINATORS, walk_data->store1_bb, def_bb))
{
if (walk_data->store2_bb == def_bb
|| !dominated_by_p (CDI_POST_DOMINATORS, walk_data->store2_bb,
def_bb))
/* Return non-NULL to stop the walk. */
return *expr_p;
}
return NULL_TREE;
}
/* Return TRUE if the destination memory address in STORE1 and STORE2
might be modified after STORE1, before control reaches STORE2. */
static bool
memory_address_same (gimple store1, gimple store2)
{
struct address_walk_data walk_data;
walk_data.store1_bb = gimple_bb (store1);
walk_data.store2_bb = gimple_bb (store2);
return (walk_tree (gimple_assign_lhs_ptr (store1), memory_ssa_name_same,
&walk_data, NULL)
== NULL);
}
/* Return true if there is a stmt that kills the lhs of STMT and is in the
virtual def-use chain of STMT without a use in between the kill and STMT.
Returns false if no such stmt is found.
*FIRST_USE_P is set to the first use of the single virtual def of
STMT. *USE_P is set to the vop killed by *USE_STMT. */
static bool
get_kill_of_stmt_lhs (gimple stmt,
use_operand_p * first_use_p,
use_operand_p * use_p, gimple * use_stmt)
{
tree lhs;
gcc_assert (is_gimple_assign (stmt));
lhs = gimple_assign_lhs (stmt);
/* We now walk the chain of single uses of the single VDEFs.
We succeeded finding a kill if the lhs of the use stmt is
equal to the original lhs. We can keep walking to the next
use if there are no possible uses of the original lhs in
the stmt. */
do
{
tree use_lhs;
def_operand_p def_p;
/* The stmt must have a single VDEF. */
def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_VDEF);
if (def_p == NULL_DEF_OPERAND_P)
return false;
/* Get the single immediate use of the def. */
if (!single_imm_use (DEF_FROM_PTR (def_p), first_use_p, &stmt))
return false;
first_use_p = use_p;
/* If there are possible hidden uses, give up. */
if (!gimple_assign_single_p (stmt)
|| (TREE_CODE (gimple_assign_rhs1 (stmt)) != SSA_NAME
&& !is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
return false;
/* If the use stmts lhs matches the original lhs we have
found the kill, otherwise continue walking. */
use_lhs = gimple_assign_lhs (stmt);
if (operand_equal_p (use_lhs, lhs, 0))
{
*use_stmt = stmt;
return true;
}
}
while (1);
}
 /* A helper of dse_optimize_stmt.
-   Given a GIMPLE_ASSIGN in STMT, check that each VDEF has one
-   use, and that one use is another VDEF clobbering the first one.
+   Given a GIMPLE_ASSIGN in STMT, find a candidate statement *USE_STMT that
+   may prove STMT to be dead.
 
    Return TRUE if the above conditions are met, otherwise FALSE.  */
 
 static bool
-dse_possible_dead_store_p (gimple stmt,
-                           use_operand_p *first_use_p,
-                           use_operand_p *use_p,
-                           gimple *use_stmt,
-                           struct dse_global_data *dse_gd,
-                           struct dse_block_local_data *bd)
+dse_possible_dead_store_p (gimple stmt, gimple *use_stmt)
 {
-  ssa_op_iter op_iter;
-  bool fail = false;
-  def_operand_p var1;
-  vuse_vec_p vv;
-  tree defvar = NULL_TREE;
-  tree prev_defvar = NULL_TREE;
   gimple temp;
+  unsigned cnt = 0;
 
-  /* We want to verify that each virtual definition in STMT has
-     precisely one use and that all the virtual definitions are
-     used by the same single statement.  When complete, we
-     want USE_STMT to refer to the one statement which uses
-     all of the virtual definitions from STMT.  */
   *use_stmt = NULL;
-  FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
+
+  /* Find the first dominated statement that clobbers (part of) the
+     memory stmt stores to with no intermediate statement that may use
+     part of the memory stmt stores.  That is, find a store that may
+     prove stmt to be a dead store.  */
+  temp = stmt;
+  do
     {
-      defvar = DEF_FROM_PTR (var1);
+      gimple prev, use_stmt;
+      imm_use_iterator ui;
+      bool fail = false;
+      tree defvar;
 
-      /* If this virtual def does not have precisely one use, then
-         we will not be able to eliminate STMT.  */
-      if (!has_single_use (defvar))
-        {
-          fail = true;
-          break;
-        }
+      /* Limit stmt walking to be linear in the number of possibly
+         dead stores.  */
+      if (++cnt > 256)
+        return false;
 
-      /* Get the one and only immediate use of DEFVAR.  */
-      single_imm_use (defvar, use_p, &temp);
-      gcc_assert (*use_p != NULL_USE_OPERAND_P);
-      *first_use_p = *use_p;
-
-      /* ???  If we hit a GIMPLE_PHI we could skip to the PHI_RESULT uses.
-         Don't bother to do that for now.  */
       if (gimple_code (temp) == GIMPLE_PHI)
+        defvar = PHI_RESULT (temp);
+      else
+        defvar = gimple_vdef (temp);
+      prev = temp;
+      temp = NULL;
+      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
         {
-          fail = true;
-          break;
-        }
+          cnt++;
 
-      /* In the case of memory partitions, we may get:
-
-           # MPT.764_162 = VDEF <MPT.764_161(D)>
-           x = {};
-
-           # MPT.764_167 = VDEF <MPT.764_162>
-           y = {};
-
-         So we must make sure we're talking about the same LHS.
-      */
-      if (is_gimple_assign (temp))
-        {
-          tree base1 = get_base_address (gimple_assign_lhs (stmt));
-          tree base2 = get_base_address (gimple_assign_lhs (temp));
-
-          while (base1 && INDIRECT_REF_P (base1))
-            base1 = TREE_OPERAND (base1, 0);
-          while (base2 && INDIRECT_REF_P (base2))
-            base2 = TREE_OPERAND (base2, 0);
-
-          if (base1 != base2)
+          /* In simple cases we can look through PHI nodes, but we
+             have to be careful with loops and with memory references
+             containing operands that are also operands of PHI nodes.
+             See gcc.c-torture/execute/20051110-*.c.  */
+          if (gimple_code (use_stmt) == GIMPLE_PHI)
+            {
+              if (temp
+                  /* We can look through PHIs to post-dominated regions
+                     without worrying if the use not also dominates prev
+                     (in which case it would be a loop PHI with the use
+                     in a latch block).  */
+                  || gimple_bb (prev) == gimple_bb (use_stmt)
+                  || !dominated_by_p (CDI_POST_DOMINATORS,
+                                      gimple_bb (prev), gimple_bb (use_stmt))
+                  || dominated_by_p (CDI_DOMINATORS,
+                                     gimple_bb (prev), gimple_bb (use_stmt)))
+                {
+                  fail = true;
+                  BREAK_FROM_IMM_USE_STMT (ui);
+                }
+              temp = use_stmt;
+            }
+          /* If the statement is a use the store is not dead.  */
+          else if (ref_maybe_used_by_stmt_p (use_stmt,
+                                             gimple_assign_lhs (stmt)))
             {
               fail = true;
-              break;
+              BREAK_FROM_IMM_USE_STMT (ui);
             }
+          /* If this is a store, remember it or bail out if we have
+             multiple ones (the will be in different CFG parts then).  */
+          else if (gimple_vdef (use_stmt))
+            {
+              if (temp)
+                {
+                  fail = true;
+                  BREAK_FROM_IMM_USE_STMT (ui);
+                }
+              temp = use_stmt;
+            }
         }
 
-      /* If the immediate use of DEF_VAR is not the same as the
-         previously find immediate uses, then we will not be able
-         to eliminate STMT.  */
-      if (*use_stmt == NULL)
+      if (fail)
+        return false;
+
+      /* If we didn't find any definition this means the store is dead
+         if it isn't a store to global reachable memory.  In this case
+         just pretend the stmt makes itself dead.  Otherwise fail.  */
+      if (!temp)
         {
-          *use_stmt = temp;
-          prev_defvar = defvar;
-        }
-      else if (temp != *use_stmt)
-        {
-          fail = true;
+          if (is_hidden_global_store (stmt))
+            return false;
+
+          temp = stmt;
           break;
         }
     }
+  /* We deliberately stop on clobbering statements and not only on
+     killing ones to make walking cheaper.  Otherwise we can just
+     continue walking until both stores have equal reference trees.  */
+  while (!stmt_may_clobber_ref_p (temp, gimple_assign_lhs (stmt)));
 
-  if (fail)
-    {
-      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
-      return false;
-    }
+  if (!is_gimple_assign (temp))
+    return false;
 
+  *use_stmt = temp;
+
   return true;
 }
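As a hedged illustration of the new walk (hypothetical code, not from the patch): a conditional store whose virtual definition is only used by the PHI at the join can be proved dead by following the PHI result to a post-dominating store that clobbers the same memory:

/* Illustrative only.  */
struct S { int a; } s;

void
h (int cond)
{
  if (cond)
    s.a = 1;   /* Its only virtual use is the join PHI; the walk steps
                  through the PHI to the clobbering store below.  */
  s.a = 3;     /* Post-dominates and clobbers: the first store is dead.  */
}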
@@ -393,7 +280,7 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
 
   /* If this statement has no virtual defs, then there is nothing
      to do.  */
-  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
+  if (!gimple_vdef (stmt))
     return;
 
   /* We know we have virtual definitions.  If this is a GIMPLE_ASSIGN
@@ -406,51 +293,21 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
 
   if (is_gimple_assign (stmt))
     {
-      use_operand_p first_use_p = NULL_USE_OPERAND_P;
-      use_operand_p use_p = NULL;
       gimple use_stmt;
 
-      if (!dse_possible_dead_store_p (stmt, &first_use_p, &use_p, &use_stmt,
-                                      dse_gd, bd))
-        return;
+      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
 
-      /* If we have precisely one immediate use at this point, then we may
-         have found redundant store.  Make sure that the stores are to
-         the same memory location.  This includes checking that any
-         SSA-form variables in the address will have the same values.  */
-      if (use_p != NULL_USE_OPERAND_P
-          && bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
-          && !operand_equal_p (gimple_assign_lhs (stmt),
-                               gimple_assign_lhs (use_stmt), 0)
-          && memory_address_same (stmt, use_stmt))
-        {
-          /* If we have precisely one immediate use at this point, but
-             the stores are not to the same memory location then walk the
-             virtual def-use chain to get the stmt which stores to that same
-             memory location.  */
-          if (!get_kill_of_stmt_lhs (stmt, &first_use_p, &use_p, &use_stmt))
-            {
-              record_voperand_set (dse_gd->stores, &bd->stores,
-                                   gimple_uid (stmt));
-              return;
-            }
-        }
+      if (!dse_possible_dead_store_p (stmt, &use_stmt))
+        return;
 
       /* If we have precisely one immediate use at this point and the
          stores are to the same memory location or there is a chain of
         virtual uses from stmt and the stmt which stores to that same
         memory location, then we may have found redundant store.  */
-      if (use_p != NULL_USE_OPERAND_P
-          && bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
+      if (bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
          && operand_equal_p (gimple_assign_lhs (stmt),
-                             gimple_assign_lhs (use_stmt), 0)
-          && memory_address_same (stmt, use_stmt))
+                             gimple_assign_lhs (use_stmt), 0))
        {
-          ssa_op_iter op_iter;
-          def_operand_p var1;
-          vuse_vec_p vv;
-          tree stmt_lhs;
-
          /* If use_stmt is or might be a nop assignment, e.g. for
             struct { ... } S a, b, *p; ...
             b = a; b = b;
@@ -462,14 +319,14 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
             *p = *u; *p = *v; where p might be v, then USE_STMT
             acts as a use as well as definition, so store in STMT
             is not dead.  */
-          if (gimple_loaded_syms (use_stmt)
-              && bitmap_intersect_p (gimple_loaded_syms (use_stmt),
-                                     gimple_stored_syms (use_stmt)))
-            {
-              record_voperand_set (dse_gd->stores, &bd->stores,
-                                   gimple_uid (stmt));
-              return;
-            }
+          if (stmt != use_stmt
+              && !is_gimple_reg (gimple_assign_rhs1 (use_stmt))
+              && !is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))
+              /* ???  Should {} be invariant?  */
+              && gimple_assign_rhs_code (use_stmt) != CONSTRUCTOR
+              && refs_may_alias_p (gimple_assign_lhs (use_stmt),
+                                   gimple_assign_rhs1 (use_stmt)))
+            return;
 
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
@@ -479,21 +336,7 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
            }
 
          /* Then we need to fix the operand of the consuming stmt.  */
-          stmt_lhs = USE_FROM_PTR (first_use_p);
-          FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
-            {
-              tree usevar;
-              gimple temp;
-
-              single_imm_use (DEF_FROM_PTR (var1), &use_p, &temp);
-              gcc_assert (VUSE_VECT_NUM_ELEM (*vv) == 1);
-              usevar = VUSE_ELEMENT_VAR (*vv, 0);
-              SET_USE (use_p, usevar);
-
-              /* Make sure we propagate the ABNORMAL bit setting.  */
-              if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (stmt_lhs))
-                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (usevar) = 1;
-            }
+          unlink_stmt_vdef (stmt);
 
          /* Remove the dead store.  */
          gsi_remove (&gsi, true);
@@ -502,8 +345,6 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
             SSA_NAME manager.  */
          release_defs (stmt);
        }
-
-      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
     }
 }
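The refs_may_alias_p guard above matters because a later store whose rhs may alias its lhs also acts as a load.  A hypothetical sketch, not part of the patch:

/* Illustrative only: if P == V, the second statement reads the value
   the first one stored, so the first store is not dead.  */
void
k (int *p, int *u, int *v)
{
  *p = *u;
  *p = *v;
}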
@@ -564,6 +405,7 @@ tree_ssa_dse (void)
      this pass could be seen as an extension of DCE which needs post
      dominators.  */
   calculate_dominance_info (CDI_POST_DOMINATORS);
+  calculate_dominance_info (CDI_DOMINATORS);
 
   /* Dead store elimination is fundamentally a walk of the post-dominator
      tree and a backwards walk of statements within each block.  */
@@ -630,153 +472,3 @@ struct gimple_opt_pass pass_dse =
  }
 };
-
-/* A very simple dead store pass eliminating write only local variables.
-   The pass does not require alias information and thus can be run before
-   inlining to quickly eliminate artifacts of some common C++ constructs.  */
-
-static unsigned int
-execute_simple_dse (void)
-{
-  gimple_stmt_iterator gsi;
-  basic_block bb;
-  bitmap variables_loaded = BITMAP_ALLOC (NULL);
-  unsigned int todo = 0;
-
-  /* Collect into VARIABLES LOADED all variables that are read in function
-     body.  */
-  FOR_EACH_BB (bb)
-    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
-      if (gimple_loaded_syms (gsi_stmt (gsi)))
-        bitmap_ior_into (variables_loaded,
-                         gimple_loaded_syms (gsi_stmt (gsi)));
-
-  /* Look for statements writing into the write only variables.
-     And try to remove them.  */
-  FOR_EACH_BB (bb)
-    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
-      {
-        gimple stmt = gsi_stmt (gsi);
-        tree op;
-        bool removed = false;
-        ssa_op_iter iter;
-        tree size;
-
-        if (is_gimple_assign (stmt)
-            && AGGREGATE_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt)))
-            && (size = lang_hooks.expr_size (gimple_assign_lhs (stmt)))
-            && integer_zerop (size))
-          {
-            if (dump_file && (dump_flags & TDF_DETAILS))
-              {
-                fprintf (dump_file, "  Deleted zero-sized store '");
-                print_gimple_stmt (dump_file, stmt, 0, dump_flags);
-                fprintf (dump_file, "'\n");
-              }
-            removed = true;
-            gsi_remove (&gsi, true);
-            todo |= TODO_cleanup_cfg;
-          }
-        else if (gimple_stored_syms (stmt)
-                 && !bitmap_empty_p (gimple_stored_syms (stmt))
-                 && (is_gimple_assign (stmt)
-                     || (is_gimple_call (stmt)
-                         && gimple_call_lhs (stmt)))
-                 && !bitmap_intersect_p (gimple_stored_syms (stmt),
-                                         variables_loaded))
-          {
-            unsigned int i;
-            bitmap_iterator bi;
-            bool dead = true;
-
-            /* See if STMT only stores to write-only variables and
-               verify that there are no volatile operands.  tree-ssa-operands
-               sets has_volatile_ops flag for all statements involving
-               reads and writes when aliases are not built to prevent passes
-               from removing them as dead.  The flag thus has no use for us
-               and we need to look into all operands.  */
-            EXECUTE_IF_SET_IN_BITMAP (gimple_stored_syms (stmt), 0, i, bi)
-              {
-                tree var = referenced_var_lookup (i);
-                if (TREE_ADDRESSABLE (var)
-                    || is_global_var (var)
-                    || TREE_THIS_VOLATILE (var))
-                  dead = false;
-              }
-
-            if (dead && gimple_loaded_syms (stmt))
-              EXECUTE_IF_SET_IN_BITMAP (gimple_loaded_syms (stmt), 0, i, bi)
-                if (TREE_THIS_VOLATILE (referenced_var_lookup (i)))
-                  dead = false;
-
-            if (dead)
-              FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
-                if (TREE_THIS_VOLATILE (op))
-                  dead = false;
-
-            /* Look for possible occurrence var = indirect_ref (...) where
-               indirect_ref itself is volatile.  */
-            if (dead && is_gimple_assign (stmt)
-                && TREE_THIS_VOLATILE (gimple_assign_rhs1 (stmt)))
-              dead = false;
-
-            if (dead)
-              {
-                /* When LHS of var = call (); is dead, simplify it into
-                   call (); saving one operand.  */
-                if (is_gimple_call (stmt)
-                    && gimple_has_side_effects (stmt))
-                  {
-                    if (dump_file && (dump_flags & TDF_DETAILS))
-                      {
-                        fprintf (dump_file, "Deleted LHS of call: ");
-                        print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
-                        fprintf (dump_file, "\n");
-                      }
-                    push_stmt_changes (gsi_stmt_ptr (&gsi));
-                    gimple_call_set_lhs (stmt, NULL);
-                    pop_stmt_changes (gsi_stmt_ptr (&gsi));
-                  }
-                else
-                  {
-                    if (dump_file && (dump_flags & TDF_DETAILS))
-                      {
-                        fprintf (dump_file, "  Deleted dead store '");
-                        print_gimple_stmt (dump_file, stmt, 0, dump_flags);
-                        fprintf (dump_file, "'\n");
-                      }
-                    removed = true;
-                    gsi_remove (&gsi, true);
-                    todo |= TODO_cleanup_cfg;
-                  }
-                todo |= TODO_remove_unused_locals | TODO_ggc_collect;
-              }
-          }
-        if (!removed)
-          gsi_next (&gsi);
-      }
-  BITMAP_FREE (variables_loaded);
-  return todo;
-}
-
-struct gimple_opt_pass pass_simple_dse =
-{
- {
-  GIMPLE_PASS,
-  "sdse",                               /* name */
-  NULL,                                 /* gate */
-  execute_simple_dse,                   /* execute */
-  NULL,                                 /* sub */
-  NULL,                                 /* next */
-  0,                                    /* static_pass_number */
-  0,                                    /* tv_id */
-  PROP_ssa,                             /* properties_required */
-  0,                                    /* properties_provided */
-  0,                                    /* properties_destroyed */
-  0,                                    /* todo_flags_start */
-  TODO_dump_func                        /* todo_flags_finish */
- }
-};
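For reference, a hypothetical example of what this removed pass could eliminate (illustrative only, not from the patch): a store into a non-addressable local that the function never reads:

/* Illustrative only.  */
int
m (int i)
{
  int tmp[16];
  tmp[0] = i;  /* TMP is write-only and local: the store is dead.  */
  return i + 1;
}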


@@ -594,8 +594,6 @@ tidy_after_forward_propagate_addr (gimple stmt)
   if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
     recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
-
-  mark_symbols_for_renaming (stmt);
 }
 
 /* DEF_RHS contains the address of the 0th element in an array.
@@ -930,18 +928,24 @@ forward_propagate_addr_expr (tree name, tree rhs)
          continue;
        }
 
-      push_stmt_changes (&use_stmt);
-
       {
        gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
+       push_stmt_changes (&use_stmt);
        result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
                                                single_use_p);
-       use_stmt = gsi_stmt (gsi);
+
+       /* If the use has moved to a different statement adjust
+          the update machinery.  */
+       if (use_stmt != gsi_stmt (gsi))
+         {
+           pop_stmt_changes (&use_stmt);
+           use_stmt = gsi_stmt (gsi);
+           update_stmt (use_stmt);
+         }
+       else
+         pop_stmt_changes (&use_stmt);
       }
       all &= result;
 
-      pop_stmt_changes (&use_stmt);
-
       /* Remove intermediate now unused copy and conversion chains.  */
       use_rhs = gimple_assign_rhs1 (use_stmt);
       if (result


@@ -108,7 +108,7 @@ bb_no_side_effects_p (basic_block bb)
       gimple stmt = gsi_stmt (gsi);
 
       if (gimple_has_volatile_ops (stmt)
-          || !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
+          || gimple_vuse (stmt))
        return false;
     }


@@ -797,11 +797,9 @@ remove_unused_locals (void)
         pass is performed.  */
      FOR_EACH_REFERENCED_VAR (t, rvi)
        if (!is_global_var (t)
-           && !MTAG_P (t)
            && TREE_CODE (t) != PARM_DECL
            && TREE_CODE (t) != RESULT_DECL
            && !(ann = var_ann (t))->used
-           && !ann->symbol_mem_tag
            && !TREE_ADDRESSABLE (t)
            && (optimize || DECL_ARTIFICIAL (t)))
          remove_referenced_var (t);


@@ -362,7 +362,7 @@ movement_possibility (gimple stmt)
   if (gimple_get_lhs (stmt) == NULL_TREE)
     return MOVE_IMPOSSIBLE;
 
-  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
+  if (gimple_vdef (stmt))
     return MOVE_IMPOSSIBLE;
 
   if (stmt_ends_bb_p (stmt)
@@ -681,7 +681,7 @@ determine_max_movement (gimple stmt, bool must_preserve_exec)
       if (!add_dependency (val, lim_data, loop, true))
        return false;
 
-  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES))
+  if (gimple_vuse (stmt))
     {
       mem_ref_p ref = mem_ref_in_stmt (stmt);
 
@@ -694,7 +694,7 @@ determine_max_movement (gimple stmt, bool must_preserve_exec)
        }
       else
        {
-         FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
+         if ((val = gimple_vuse (stmt)) != NULL_TREE)
            {
              if (!add_dependency (val, lim_data, loop, false))
                return false;
@@ -1080,7 +1080,7 @@ move_computations (void)
   fini_walk_dominator_tree (&walk_data);
   gsi_commit_edge_inserts ();
 
-  if (need_ssa_update_p ())
+  if (need_ssa_update_p (cfun))
     rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
 }
 
@@ -1309,13 +1309,12 @@ gather_mem_refs_stmt (struct loop *loop, gimple stmt)
   hashval_t hash;
   PTR *slot;
   mem_ref_p ref;
-  ssa_op_iter oi;
   tree vname;
   bool is_stored;
   bitmap clvops;
   unsigned id;
 
-  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
+  if (!gimple_vuse (stmt))
     return;
 
   mem = simple_mem_ref_in_stmt (stmt, &is_stored);
@@ -1347,14 +1346,14 @@ gather_mem_refs_stmt (struct loop *loop, gimple stmt)
       if (is_stored)
        mark_ref_stored (ref, loop);
 
-      FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
+      if ((vname = gimple_vuse (stmt)) != NULL_TREE)
        bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
       record_mem_ref_loc (ref, loop, stmt, mem);
       return;
 
 fail:
   clvops = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
-  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
+  if ((vname = gimple_vuse (stmt)) != NULL_TREE)
     bitmap_set_bit (clvops, DECL_UID (SSA_NAME_VAR (vname)));
 }


@@ -442,7 +442,7 @@ empty_loop_p (struct loop *loop)
        {
          gimple stmt = gsi_stmt (gsi);
 
-         if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS)
+         if (gimple_vdef (stmt)
              || gimple_has_volatile_ops (stmt))
            {
              free (body);


@@ -5222,63 +5222,6 @@ unshare_and_remove_ssa_names (tree ref)
   return ref;
 }
 
-/* Extract the alias analysis info for the memory reference REF.  There are
-   several ways how this information may be stored and what precisely is
-   its semantics depending on the type of the reference, but there always is
-   somewhere hidden one _DECL node that is used to determine the set of
-   virtual operands for the reference.  The code below deciphers this jungle
-   and extracts this single useful piece of information.  */
-
-static tree
-get_ref_tag (tree ref, tree orig)
-{
-  tree var = get_base_address (ref);
-  tree aref = NULL_TREE, tag, sv;
-  HOST_WIDE_INT offset, size, maxsize;
-
-  for (sv = orig; handled_component_p (sv); sv = TREE_OPERAND (sv, 0))
-    {
-      aref = get_ref_base_and_extent (sv, &offset, &size, &maxsize);
-      if (ref)
-        break;
-    }
-
-  if (!var)
-    return NULL_TREE;
-
-  if (TREE_CODE (var) == INDIRECT_REF)
-    {
-      /* If the base is a dereference of a pointer, first check its name memory
-         tag.  If it does not have one, use its symbol memory tag.  */
-      var = TREE_OPERAND (var, 0);
-      if (TREE_CODE (var) != SSA_NAME)
-        return NULL_TREE;
-
-      if (SSA_NAME_PTR_INFO (var))
-        {
-          tag = SSA_NAME_PTR_INFO (var)->name_mem_tag;
-          if (tag)
-            return tag;
-        }
-
-      var = SSA_NAME_VAR (var);
-      tag = symbol_mem_tag (var);
-      gcc_assert (tag != NULL_TREE);
-      return tag;
-    }
-  else
-    {
-      if (!DECL_P (var))
-        return NULL_TREE;
-
-      tag = symbol_mem_tag (var);
-      if (tag)
-        return tag;
-
-      return var;
-    }
-}
-
 /* Copies the reference information from OLD_REF to NEW_REF.  */
 
 static void
@@ -5287,10 +5230,7 @@ copy_ref_info (tree new_ref, tree old_ref)
   if (TREE_CODE (old_ref) == TARGET_MEM_REF)
     copy_mem_ref_info (new_ref, old_ref);
   else
-    {
-      TMR_ORIGINAL (new_ref) = unshare_and_remove_ssa_names (old_ref);
-      TMR_TAG (new_ref) = get_ref_tag (old_ref, TMR_ORIGINAL (new_ref));
-    }
+    TMR_ORIGINAL (new_ref) = unshare_and_remove_ssa_names (old_ref);
 }
 
 /* Rewrites USE (address that is an iv) using candidate CAND.  */


@@ -1993,17 +1993,13 @@ chain_of_csts_start (struct loop *loop, tree x)
 
   code = gimple_assign_rhs_code (stmt);
   if (gimple_references_memory_p (stmt)
-      /* Before alias information is computed, operand scanning marks
-         statements that write memory volatile.  However, the statements
-         that only read memory are not marked, thus gimple_references_memory_p
-         returns false for them.  */
       || TREE_CODE_CLASS (code) == tcc_reference
       || TREE_CODE_CLASS (code) == tcc_declaration
-      || SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF) == NULL_DEF_OPERAND_P)
+      || (code == ADDR_EXPR
+          && !is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
     return NULL;
 
   use = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
-  if (use == NULL_USE_OPERAND_P)
+  if (use == NULL_TREE)
     return NULL;
 
   return chain_of_csts_start (loop, use);


@@ -502,7 +502,7 @@ gather_memory_references (struct loop *loop, bool *no_other_refs)
 
       if (gimple_code (stmt) != GIMPLE_ASSIGN)
        {
-         if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)
+         if (gimple_vuse (stmt)
              || (is_gimple_call (stmt)
                  && !(gimple_call_flags (stmt) & ECF_CONST)))
            *no_other_refs = false;
