Eliminate ENTRY_BLOCK_PTR and EXIT_BLOCK_PTR macros
gcc/ * basic-block.h (ENTRY_BLOCK_PTR_FOR_FUNCTION): Rename macro to... (ENTRY_BLOCK_PTR_FOR_FN): ...this. (EXIT_BLOCK_PTR_FOR_FUNCTION): Rename macro to... (EXIT_BLOCK_PTR_FOR_FN): ...this. (ENTRY_BLOCK_PTR): Eliminate macro as work towards making uses of cfun be explicit. (EXIT_BLOCK_PTR): Likewise. (FOR_ALL_BB): Rework for now to eliminate use of "ENTRY_BLOCK_PTR". (FOR_ALL_BB_FN): Update for renaming of "ENTRY_BLOCK_PTR_FOR_FUNCTION" to "ENTRY_BLOCK_PTR_FOR_FN". * cfg.c (init_flow): Likewise. (check_bb_profile): Likewise. * cfganal.c (pre_and_rev_post_order_compute_fn): Likewise. * cfgcleanup.c (walk_to_nondebug_insn): Likewise. * cfghooks.c (account_profile_record): Likewise. * cfgloop.c (init_loops_structure): Likewise. * cgraphbuild.c (record_eh_tables): Likewise. (compute_call_stmt_bb_frequency): Likewise. * ipa-inline-analysis.c (compute_bb_predicates): Likewise. * lto-streamer-in.c (input_cfg): Likewise. * predict.c (maybe_hot_frequency_p): Likewise. * tree-cfg.c (init_empty_tree_cfg_for_function): Likewise. * tree-inline.c (initialize_cfun): Likewise. (copy_cfg_body): Likewise. (copy_body): Likewise. (tree_function_versioning): Likewise. * bb-reorder.c (add_labels_and_missing_jumps): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (duplicate_computed_gotos): Remove usage of EXIT_BLOCK_PTR macro. (find_rarely_executed_basic_blocks_and_crossing_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (connect_traces): Likewise. (rest_of_handle_reorder_blocks): Remove usage of EXIT_BLOCK_PTR macro. (bb_to_key): Remove usage of ENTRY_BLOCK_PTR macro. (fix_crossing_conditional_branches): Remove usage of EXIT_BLOCK_PTR macro. (find_traces_1_round): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (fix_up_fall_thru_edges): Remove usage of EXIT_BLOCK_PTR macro. (find_traces): Remove usage of ENTRY_BLOCK_PTR macro. (fix_up_crossing_landing_pad): Remove usage of EXIT_BLOCK_PTR macro. (rotate_loop): Likewise. 
* bt-load.c (migrate_btr_def): Remove usage of ENTRY_BLOCK_PTR macro. * cfg.c (clear_aux_for_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (alloc_aux_for_edges): Likewise. (clear_bb_flags): Remove usage of ENTRY_BLOCK_PTR macro. (cached_make_edge): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (compact_blocks): Likewise. (clear_edges): Likewise. * cfganal.c (single_pred_before_succ_order): Remove usage of ENTRY_BLOCK_PTR macro. (bitmap_union_of_succs): Remove usage of EXIT_BLOCK_PTR macro. (bitmap_union_of_preds): Remove usage of ENTRY_BLOCK_PTR macro. (bitmap_intersection_of_succs): Remove usage of EXIT_BLOCK_PTR macro. (bitmap_intersection_of_preds): Remove usage of ENTRY_BLOCK_PTR macro. (inverted_post_order_compute): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (compute_dominance_frontiers_1): Remove usage of ENTRY_BLOCK_PTR macro. (post_order_compute): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (connect_infinite_loops_to_exit): Remove usage of EXIT_BLOCK_PTR macro. (remove_fake_edges): Remove usage of ENTRY_BLOCK_PTR macro. (add_noreturn_fake_exit_edges): Remove usage of EXIT_BLOCK_PTR macro. (find_pdom): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (remove_fake_exit_edges): Remove usage of EXIT_BLOCK_PTR macro. (verify_edge_list): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (print_edge_list): Likewise. (create_edge_list): Likewise. (find_unreachable_blocks): Remove usage of ENTRY_BLOCK_PTR macro. (mark_dfs_back_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. * cfgbuild.c (find_bb_boundaries): Remove usage of ENTRY_BLOCK_PTR macro. (find_many_sub_basic_blocks): Remove usage of EXIT_BLOCK_PTR macro. (make_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. * cfgcleanup.c (delete_unreachable_blocks): Likewise. (try_optimize_cfg): Likewise. (try_head_merge_bb): Remove usage of EXIT_BLOCK_PTR macro. (try_crossjump_to_edge): Remove usage of ENTRY_BLOCK_PTR macro. 
(try_crossjump_bb): Remove usage of EXIT_BLOCK_PTR macro. (merge_blocks_move): Remove usage of ENTRY_BLOCK_PTR macro. (outgoing_edges_match): Remove usage of EXIT_BLOCK_PTR macro. (try_forward_edges): Likewise. (try_simplify_condjump): Likewise. * cfgexpand.c (gimple_expand_cfg): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (construct_exit_block): Remove usage of EXIT_BLOCK_PTR macro. (construct_init_block): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (expand_gimple_basic_block): Remove usage of EXIT_BLOCK_PTR macro. (expand_gimple_tailcall): Likewise. * cfghooks.c (can_duplicate_block_p): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (tidy_fallthru_edges): Likewise. (verify_flow_info): Likewise. * cfgloop.c (flow_bb_inside_loop_p): Likewise. (num_loop_branches): Remove usage of EXIT_BLOCK_PTR macro. (disambiguate_multiple_latches): Remove usage of ENTRY_BLOCK_PTR macro. (get_loop_exit_edges): Remove usage of EXIT_BLOCK_PTR macro. (bb_loop_header_p): Remove usage of ENTRY_BLOCK_PTR macro. (get_loop_body_in_bfs_order): Remove usage of EXIT_BLOCK_PTR macro. (get_loop_body_in_dom_order): Likewise. (get_loop_body): Likewise. * cfgloopanal.c (mark_irreducible_loops): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. * cfgloopmanip.c (create_preheader): Remove usage of ENTRY_BLOCK_PTR macro. (remove_path): Remove usage of EXIT_BLOCK_PTR macro. (fix_bb_placement): Likewise. * cfgrtl.c (rtl_block_empty_p): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (rtl_can_remove_branch_p): Remove usage of EXIT_BLOCK_PTR macro. (cfg_layout_split_edge): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (rtl_flow_call_edges_add): Remove usage of EXIT_BLOCK_PTR macro. (cfg_layout_can_merge_blocks_p): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (cfg_layout_redirect_edge_and_branch): Remove usage of ENTRY_BLOCK_PTR macro. (fixup_fallthru_exit_predecessor): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. 
(fixup_reorder_chain): Likewise. (relink_block_chain): Likewise. (cfg_layout_delete_block): Remove usage of EXIT_BLOCK_PTR macro. (rtl_verify_bb_layout): Remove usage of ENTRY_BLOCK_PTR macro. (cfg_layout_duplicate_bb): Remove usage of EXIT_BLOCK_PTR macro. (force_one_exit_fallthru): Likewise. (rtl_verify_fallthru): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (rtl_verify_edges): Likewise. (commit_edge_insertions): Likewise. (commit_one_edge_insertion): Likewise. (rtl_split_edge): Likewise. (force_nonfallthru_and_redirect): Likewise. (outof_cfg_layout_mode): Remove usage of EXIT_BLOCK_PTR macro. (skip_insns_after_block): Likewise. (fixup_partition_crossing): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (purge_dead_edges): Remove usage of EXIT_BLOCK_PTR macro. (rtl_can_merge_blocks): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (contains_no_active_insn_p): Likewise. (emit_insn_at_entry): Remove usage of ENTRY_BLOCK_PTR macro. (entry_of_function): Likewise. (last_bb_in_partition): Remove usage of EXIT_BLOCK_PTR macro. (fixup_new_cold_bb): Likewise. (patch_jump_insn): Likewise. (try_redirect_by_replacing_jump): Likewise. (block_label): Likewise. (could_fall_through): Likewise. (can_fallthru): Likewise. * cgraphbuild.c (cgraph_rebuild_references): Remove usage of ENTRY_BLOCK_PTR macro. (rebuild_cgraph_edges): Likewise. * cgraphunit.c (init_lowered_empty_function): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (expand_thunk): Remove usage of EXIT_BLOCK_PTR macro. * combine.c (get_last_value): Remove usage of ENTRY_BLOCK_PTR macro. (distribute_links): Remove usage of EXIT_BLOCK_PTR macro. (get_last_value_validate): Remove usage of ENTRY_BLOCK_PTR macro. (try_combine): Remove usage of EXIT_BLOCK_PTR macro. (reg_num_sign_bit_copies_for_combine): Remove usage of ENTRY_BLOCK_PTR macro. (reg_nonzero_bits_for_combine): Likewise. (set_nonzero_bits_and_sign_copies): Likewise. (combine_instructions): Likewise. 
* cprop.c (one_cprop_pass): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (bypass_conditional_jumps): Likewise. (bypass_block): Remove usage of EXIT_BLOCK_PTR macro. (find_implicit_sets): Likewise. (cprop_jump): Likewise. * cse.c (cse_cc_succs): Likewise. (cse_find_path): Likewise. * df-problems.c (df_lr_confluence_0): Likewise. * df-scan.c (df_entry_block_defs_collect): Remove usage of ENTRY_BLOCK_PTR macro. (df_exit_block_uses_collect): Remove usage of EXIT_BLOCK_PTR macro. * dominance.c (iterate_fix_dominators): Remove usage of ENTRY_BLOCK_PTR macro. (calc_idoms): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (determine_dominators_for_sons): Remove usage of ENTRY_BLOCK_PTR macro. (calc_dfs_tree): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (prune_bbs_to_update_dominators): Remove usage of ENTRY_BLOCK_PTR macro. (calc_dfs_tree_nonrec): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. * domwalk.c (cmp_bb_postorder): Likewise. * dse.c (dse_step1): Remove usage of EXIT_BLOCK_PTR macro. * except.c (finish_eh_generation): Remove usage of ENTRY_BLOCK_PTR macro. (sjlj_emit_function_enter): Likewise. * final.c (compute_alignments): Likewise. * function.c (thread_prologue_and_epilogue_insns): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (reposition_prologue_and_epilogue_notes): Remove usage of EXIT_BLOCK_PTR macro. (convert_jumps_to_returns): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (regno_clobbered_at_setjmp): Remove usage of ENTRY_BLOCK_PTR macro. (next_block_for_reg): Remove usage of EXIT_BLOCK_PTR macro. * gcse.c (hoist_code): Remove usage of ENTRY_BLOCK_PTR macro. (update_bb_reg_pressure): Remove usage of EXIT_BLOCK_PTR macro. (compute_code_hoist_vbeinout): Likewise. (should_hoist_expr_to_dom): Remove usage of ENTRY_BLOCK_PTR macro. (pre_expr_reaches_here_p_work): Likewise. * gimple-iterator.c (gsi_commit_edge_inserts): Likewise. 
(gimple_find_edge_insert_loc): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. * gimple-ssa-strength-reduction.c (slsr_process_phi): Remove usage of ENTRY_BLOCK_PTR macro. * graph.c (draw_cfg_nodes_for_loop): Remove usage of EXIT_BLOCK_PTR macro. * graphite-clast-to-gimple.c (translate_clast_user): Remove usage of ENTRY_BLOCK_PTR macro. * graphite-scop-detection.c (build_scops): Likewise. (create_sese_edges): Remove usage of EXIT_BLOCK_PTR macro. (scopdet_basic_block_info): Remove usage of ENTRY_BLOCK_PTR macro. * haifa-sched.c (restore_bb_notes): Remove usage of EXIT_BLOCK_PTR macro. (unlink_bb_notes): Likewise. (create_check_block_twin): Likewise. (init_before_recovery): Likewise. (sched_extend_bb): Likewise. (priority): Likewise. * hw-doloop.c (reorder_loops): Likewise. (discover_loop): Likewise. * ifcvt.c (dead_or_predicable): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (find_if_case_1): Remove usage of EXIT_BLOCK_PTR macro. (block_has_only_trap): Likewise. (cond_exec_find_if_block): Likewise. (merge_if_block): Likewise. * ipa-inline-analysis.c (param_change_prob): Remove usage of ENTRY_BLOCK_PTR macro. (record_modified): Likewise. * ipa-pure-const.c (execute_warn_function_noreturn): Remove usage of EXIT_BLOCK_PTR macro. (local_pure_const): Likewise. * ipa-split.c (split_function): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (find_split_points): Likewise. (consider_split): Likewise. (find_return_bb): Remove usage of EXIT_BLOCK_PTR macro. (verify_non_ssa_vars): Remove usage of ENTRY_BLOCK_PTR macro. * ira-build.c (ira_loop_tree_body_rev_postorder): Likewise. * ira-color.c (print_loop_title): Remove usage of EXIT_BLOCK_PTR macro. * ira-emit.c (entered_from_non_parent_p): Remove usage of ENTRY_BLOCK_PTR macro. (ira_emit): Remove usage of EXIT_BLOCK_PTR macro. * ira-int.h (ira_assert): Remove usage of ENTRY_BLOCK_PTR macro. * ira.c (split_live_ranges_for_shrink_wrap): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. 
* lcm.c (compute_rev_insert_delete): Remove usage of ENTRY_BLOCK_PTR macro. (compute_nearerout): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (compute_farthest): Likewise. (compute_available): Likewise. (compute_insert_delete): Remove usage of EXIT_BLOCK_PTR macro. (compute_laterin): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (compute_earliest): Likewise. (compute_antinout_edge): Likewise. * loop-iv.c (simplify_using_initial_values): Remove usage of ENTRY_BLOCK_PTR macro. * loop-unswitch.c (unswitch_loop): Remove usage of EXIT_BLOCK_PTR macro. * lra-assigns.c (find_hard_regno_for): Remove usage of ENTRY_BLOCK_PTR macro. * lra-constraints.c (lra_inheritance): Remove usage of EXIT_BLOCK_PTR macro. * lra-lives.c (lra_create_live_ranges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. * lra.c (has_nonexceptional_receiver): Remove usage of EXIT_BLOCK_PTR macro. * lto-streamer-in.c (input_function): Remove usage of ENTRY_BLOCK_PTR macro. * lto-streamer-out.c (output_cfg): Likewise. * mcf.c (adjust_cfg_counts): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (create_fixup_graph): Remove usage of ENTRY_BLOCK_PTR macro. * mode-switching.c (optimize_mode_switching): Likewise. (create_pre_exit): Remove usage of EXIT_BLOCK_PTR macro. * modulo-sched.c (rest_of_handle_sms): Likewise. (canon_loop): Likewise. * omp-low.c (build_omp_regions): Remove usage of ENTRY_BLOCK_PTR macro. * postreload-gcse.c (eliminate_partially_redundant_loads): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. * predict.c (rebuild_frequencies): Remove usage of ENTRY_BLOCK_PTR macro. (propagate_freq): Remove usage of EXIT_BLOCK_PTR macro. (estimate_bb_frequencies): Remove usage of ENTRY_BLOCK_PTR macro. (tree_estimate_probability_bb): Remove usage of EXIT_BLOCK_PTR macro. (expensive_function_p): Remove usage of ENTRY_BLOCK_PTR macro. (tree_bb_level_predictions): Remove usage of EXIT_BLOCK_PTR macro. (counts_to_freqs): Remove usage of ENTRY_BLOCK_PTR macro. 
(apply_return_prediction): Remove usage of EXIT_BLOCK_PTR macro. (estimate_loops): Remove usage of ENTRY_BLOCK_PTR macro. (gimple_predict_edge): Likewise. (probably_never_executed): Likewise. * profile.c (find_spanning_tree): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (branch_prob): Likewise. (compute_branch_probabilities): Likewise. (compute_frequency_overlap): Remove usage of ENTRY_BLOCK_PTR macro. (is_inconsistent): Remove usage of EXIT_BLOCK_PTR macro. (read_profile_edge_counts): Remove usage of ENTRY_BLOCK_PTR macro. (set_bb_counts): Likewise. (correct_negative_edge_counts): Likewise. (get_exec_counts): Likewise. (instrument_values): Likewise. (instrument_edges): Likewise. * reg-stack.c (convert_regs): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (compensate_edges): Remove usage of ENTRY_BLOCK_PTR macro. (convert_regs_exit): Remove usage of EXIT_BLOCK_PTR macro. (convert_regs_entry): Remove usage of ENTRY_BLOCK_PTR macro. (reg_to_stack): Likewise. * regs.h (REG_N_SETS): Likewise. * reload.c (find_dummy_reload): Likewise. (combine_reloads): Likewise. (push_reload): Likewise. * reload1.c (has_nonexceptional_receiver): Remove usage of EXIT_BLOCK_PTR macro. * resource.c (mark_target_live_regs): Remove usage of ENTRY_BLOCK_PTR macro. (find_basic_block): Likewise. * sched-ebb.c (ebb_add_block): Remove usage of EXIT_BLOCK_PTR macro. (schedule_ebbs): Likewise. * sched-int.h (sel_sched_p): Likewise. * sched-rgn.c (compute_dom_prob_ps): Remove usage of ENTRY_BLOCK_PTR macro. (rgn_add_block): Remove usage of EXIT_BLOCK_PTR macro. (haifa_find_rgns): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (propagate_deps): Remove usage of EXIT_BLOCK_PTR macro. (extend_rgns): Likewise. (find_single_block_region): Likewise. * sel-sched-ir.c (sel_remove_loop_preheader): Remove usage of ENTRY_BLOCK_PTR macro. (setup_nop_and_exit_insns): Remove usage of EXIT_BLOCK_PTR macro. (sel_create_recovery_block): Likewise. (bb_ends_ebb_p): Likewise. 
(sel_bb_end): Likewise. (sel_bb_head): Likewise. (free_lv_sets): Likewise. (init_lv_sets): Likewise. (tidy_control_flow): Likewise. (maybe_tidy_empty_bb): Likewise. * sel-sched-ir.h (_succ_iter_cond): Likewise. (_succ_iter_start): Likewise. (sel_bb_empty_or_nop_p): Likewise. (get_loop_exit_edges_unique_dests): Likewise. (inner_loop_header_p): Likewise. * sel-sched.c (create_block_for_bookkeeping): Likewise. (find_block_for_bookkeeping): Likewise. * store-motion.c (remove_reachable_equiv_notes): Likewise. (insert_store): Likewise. * trans-mem.c (ipa_tm_transform_clone): Remove usage of ENTRY_BLOCK_PTR macro. (tm_memopt_compute_available): Remove usage of EXIT_BLOCK_PTR macro. (ipa_tm_scan_irr_function): Remove usage of ENTRY_BLOCK_PTR macro. (gate_tm_init): Likewise. (tm_region_init): Likewise. * tree-cfg.c (execute_fixup_cfg): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (execute_warn_function_return): Remove usage of EXIT_BLOCK_PTR macro. (split_critical_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (print_loops): Remove usage of ENTRY_BLOCK_PTR macro. (move_sese_region_to_fn): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (gimple_redirect_edge_and_branch): Remove usage of ENTRY_BLOCK_PTR macro. (gimple_verify_flow_info): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (remove_edge_and_dominated_blocks): Remove usage of EXIT_BLOCK_PTR macro. (make_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (gimple_flow_call_edges_add): Remove usage of EXIT_BLOCK_PTR macro. (make_blocks): Remove usage of ENTRY_BLOCK_PTR macro. (build_gimple_cfg): Likewise. (gimple_duplicate_bb): Remove usage of EXIT_BLOCK_PTR macro. (gimple_can_merge_blocks_p): Likewise. * tree-cfgcleanup.c (tree_forwarder_block_p): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. * tree-complex.c (update_parameter_components): Remove usage of ENTRY_BLOCK_PTR macro. 
* tree-if-conv.c (get_loop_body_in_if_conv_order): Remove usage of EXIT_BLOCK_PTR macro. * tree-inline.c (tree_function_versioning): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (delete_unreachable_blocks_update_callgraph): Likewise. (initialize_cfun): Likewise. (copy_cfg_body): Remove usage of ENTRY_BLOCK_PTR macro. (copy_edges_for_bb): Remove usage of EXIT_BLOCK_PTR macro. (remap_ssa_name): Remove usage of ENTRY_BLOCK_PTR macro. * tree-into-ssa.c (update_ssa): Likewise. (maybe_register_def): Remove usage of EXIT_BLOCK_PTR macro. (insert_updated_phi_nodes_for): Remove usage of ENTRY_BLOCK_PTR macro. (rewrite_into_ssa): Likewise. (rewrite_debug_stmt_uses): Likewise. * tree-outof-ssa.c (expand_phi_nodes): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. * tree-profile.c (gimple_gen_ic_func_profiler): Remove usage of ENTRY_BLOCK_PTR macro. * tree-scalar-evolution.h (block_before_loop): Likewise. * tree-sra.c (sra_ipa_reset_debug_stmts): Likewise. (dump_dereferences_table): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (analyze_caller_dereference_legality): Remove usage of ENTRY_BLOCK_PTR macro. (propagate_dereference_distances): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (initialize_parameter_reductions): Remove usage of ENTRY_BLOCK_PTR macro. * tree-ssa-ccp.c (gsi_prev_dom_bb_nondebug): Likewise. (optimize_stack_restore): Remove usage of EXIT_BLOCK_PTR macro. * tree-ssa-coalesce.c (create_outofssa_var_map): Likewise. * tree-ssa-dce.c (eliminate_unnecessary_stmts): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (remove_dead_stmt): Remove usage of EXIT_BLOCK_PTR macro. (propagate_necessity): Remove usage of ENTRY_BLOCK_PTR macro. (mark_control_dependent_edges_necessary): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. * tree-ssa-dom.c (eliminate_degenerate_phis): Remove usage of ENTRY_BLOCK_PTR macro. (tree_ssa_dominator_optimize): Remove usage of EXIT_BLOCK_PTR macro. 
* tree-ssa-live.c (verify_live_on_entry): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (calculate_live_on_exit): Likewise. (set_var_live_on_entry): Remove usage of ENTRY_BLOCK_PTR macro. (loe_visit_block): Likewise. * tree-ssa-live.h (live_on_exit): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (live_on_entry): Likewise. * tree-ssa-loop-ivopts.c (find_interesting_uses): Remove usage of EXIT_BLOCK_PTR macro. * tree-ssa-loop-manip.c (compute_live_loop_exits): Remove usage of ENTRY_BLOCK_PTR macro. * tree-ssa-loop-niter.c (simplify_using_initial_conditions): Likewise. (bound_difference): Likewise. * tree-ssa-loop-prefetch.c (may_use_storent_in_loop_p): Remove usage of EXIT_BLOCK_PTR macro. * tree-ssa-loop-unswitch.c (simplify_using_entry_checks): Remove usage of ENTRY_BLOCK_PTR macro. * tree-ssa-math-opts.c (register_division_in): Likewise. * tree-ssa-phiprop.c (tree_ssa_phiprop): Likewise. * tree-ssa-pre.c (compute_avail): Likewise. (compute_antic): Remove usage of EXIT_BLOCK_PTR macro. (insert): Remove usage of ENTRY_BLOCK_PTR macro. * tree-ssa-propagate.c (ssa_prop_init): Likewise. (simulate_block): Remove usage of EXIT_BLOCK_PTR macro. (cfg_blocks_add): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (add_control_edge): Remove usage of EXIT_BLOCK_PTR macro. * tree-ssa-reassoc.c (do_reassoc): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (build_and_add_sum): Remove usage of ENTRY_BLOCK_PTR macro. * tree-ssa-sink.c (nearest_common_dominator_of_uses): Likewise. (execute_sink_code): Remove usage of EXIT_BLOCK_PTR macro. * tree-ssa-uninit.c (find_dom): Remove usage of ENTRY_BLOCK_PTR macro. (compute_control_dep_chain): Remove usage of EXIT_BLOCK_PTR macro. (find_pdom): Likewise. (warn_uninitialized_vars): Remove usage of ENTRY_BLOCK_PTR macro. * tree-stdarg.c (reachable_at_most_once): Likewise. * tree-tailcall.c (tree_optimize_tail_calls_1): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (eliminate_tail_call): Likewise. 
* tsan.c (instrument_func_entry): Remove usage of ENTRY_BLOCK_PTR macro. (instrument_func_exit): Remove usage of EXIT_BLOCK_PTR macro. * var-tracking.c (vt_initialize): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. (vt_add_function_parameter): Remove usage of ENTRY_BLOCK_PTR macro. (vt_find_locations): Remove usage of EXIT_BLOCK_PTR macro. (vt_stack_adjustments): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR. * varasm.c (assemble_start_function): Remove usage of ENTRY_BLOCK_PTR macro. * config/bfin/bfin.c (hwloop_optimize): Likewise. * config/nds32/nds32.c (nds32_fp_as_gp_check_available): Remove usage of EXIT_BLOCK_PTR macro. * config/arm/arm.c (require_pic_register): Remove usage of ENTRY_BLOCK_PTR macro. (arm_r3_live_at_start_p): Likewise. (any_sibcall_could_use_r3): Remove usage of EXIT_BLOCK_PTR macro. * config/rs6000/rs6000.c (rs6000_emit_prologue): Likewise. * config/frv/frv.c (frv_optimize_membar_global): Likewise. * config/alpha/alpha.c (alpha_gp_save_rtx): Remove usage of ENTRY_BLOCK_PTR macro. * config/i386/i386.c (ix86_count_insn): Likewise. (ix86_seh_fixup_eh_fallthru): Remove usage of EXIT_BLOCK_PTR macro. (ix86_pad_short_function): Likewise. (ix86_compute_frame_layout): Remove usage of ENTRY_BLOCK_PTR macro. (ix86_pad_returns): Remove usage of EXIT_BLOCK_PTR macro. (ix86_eax_live_at_start_p): Remove usage of ENTRY_BLOCK_PTR macro. (add_condition_to_bb): Remove usage of EXIT_BLOCK_PTR macro. (ix86_expand_epilogue): Likewise. * config/ia64/ia64.c (ia64_asm_unwind_emit): Likewise. (ia64_expand_prologue): Likewise. From-SVN: r205055
This commit is contained in:
parent
faaa3afb19
commit
fefa31b568
517
gcc/ChangeLog
517
gcc/ChangeLog
|
@@ -1,3 +1,520 @@
|
|||
2013-11-19 David Malcolm <dmalcolm@redhat.com>
|
||||
|
||||
* basic-block.h (ENTRY_BLOCK_PTR_FOR_FUNCTION): Rename macro to...
|
||||
(ENTRY_BLOCK_PTR_FOR_FN): ...this.
|
||||
(EXIT_BLOCK_PTR_FOR_FUNCTION): Rename macro to...
|
||||
(EXIT_BLOCK_PTR_FOR_FN): ...this.
|
||||
(ENTRY_BLOCK_PTR): Eliminate macro as work towards making uses of
|
||||
cfun be explicit.
|
||||
(EXIT_BLOCK_PTR): Likewise.
|
||||
(FOR_ALL_BB): Rework for now to eliminate use of "ENTRY_BLOCK_PTR".
|
||||
(FOR_ALL_BB_FN): Update for renaming of
|
||||
"ENTRY_BLOCK_PTR_FOR_FUNCTION" to "ENTRY_BLOCK_PTR_FOR_FN".
|
||||
|
||||
* cfg.c (init_flow): Likewise.
|
||||
(check_bb_profile): Likewise.
|
||||
* cfganal.c (pre_and_rev_post_order_compute_fn): Likewise.
|
||||
* cfgcleanup.c (walk_to_nondebug_insn): Likewise.
|
||||
* cfghooks.c (account_profile_record): Likewise.
|
||||
* cfgloop.c (init_loops_structure): Likewise.
|
||||
* cgraphbuild.c (record_eh_tables): Likewise.
|
||||
(compute_call_stmt_bb_frequency): Likewise.
|
||||
* ipa-inline-analysis.c (compute_bb_predicates): Likewise.
|
||||
* lto-streamer-in.c (input_cfg): Likewise.
|
||||
* predict.c (maybe_hot_frequency_p): Likewise.
|
||||
* tree-cfg.c (init_empty_tree_cfg_for_function): Likewise.
|
||||
* tree-inline.c (initialize_cfun): Likewise.
|
||||
(copy_cfg_body): Likewise.
|
||||
(copy_body): Likewise.
|
||||
(tree_function_versioning): Likewise.
|
||||
|
||||
* bb-reorder.c (add_labels_and_missing_jumps): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(duplicate_computed_gotos): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(find_rarely_executed_basic_blocks_and_crossing_edges): Remove uses of
|
||||
macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(connect_traces): Likewise.
|
||||
(rest_of_handle_reorder_blocks): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(bb_to_key): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(fix_crossing_conditional_branches): Remove usage of EXIT_BLOCK_PTR
|
||||
macro.
|
||||
(find_traces_1_round): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(fix_up_fall_thru_edges): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(find_traces): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(fix_up_crossing_landing_pad): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(rotate_loop): Likewise.
|
||||
* bt-load.c (migrate_btr_def): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
* cfg.c (clear_aux_for_edges): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(alloc_aux_for_edges): Likewise.
|
||||
(clear_bb_flags): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(cached_make_edge): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(compact_blocks): Likewise.
|
||||
(clear_edges): Likewise.
|
||||
* cfganal.c (single_pred_before_succ_order): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
(bitmap_union_of_succs): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(bitmap_union_of_preds): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(bitmap_intersection_of_succs): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(bitmap_intersection_of_preds): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(inverted_post_order_compute): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(compute_dominance_frontiers_1): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(post_order_compute): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(connect_infinite_loops_to_exit): Remove usage of EXIT_BLOCK_PTR
|
||||
macro.
|
||||
(remove_fake_edges): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(add_noreturn_fake_exit_edges): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(find_pdom): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(remove_fake_exit_edges): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(verify_edge_list): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(print_edge_list): Likewise.
|
||||
(create_edge_list): Likewise.
|
||||
(find_unreachable_blocks): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(mark_dfs_back_edges): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
* cfgbuild.c (find_bb_boundaries): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(find_many_sub_basic_blocks): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(make_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
* cfgcleanup.c (delete_unreachable_blocks): Likewise.
|
||||
(try_optimize_cfg): Likewise.
|
||||
(try_head_merge_bb): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(try_crossjump_to_edge): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(try_crossjump_bb): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(merge_blocks_move): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(outgoing_edges_match): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(try_forward_edges): Likewise.
|
||||
(try_simplify_condjump): Likewise.
|
||||
* cfgexpand.c (gimple_expand_cfg): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(construct_exit_block): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(construct_init_block): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(expand_gimple_basic_block): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(expand_gimple_tailcall): Likewise.
|
||||
* cfghooks.c (can_duplicate_block_p): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(tidy_fallthru_edges): Likewise.
|
||||
(verify_flow_info): Likewise.
|
||||
* cfgloop.c (flow_bb_inside_loop_p): Likewise.
|
||||
(num_loop_branches): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(disambiguate_multiple_latches): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(get_loop_exit_edges): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(bb_loop_header_p): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(get_loop_body_in_bfs_order): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(get_loop_body_in_dom_order): Likewise.
|
||||
(get_loop_body): Likewise.
|
||||
* cfgloopanal.c (mark_irreducible_loops): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
* cfgloopmanip.c (create_preheader): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(remove_path): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(fix_bb_placement): Likewise.
|
||||
* cfgrtl.c (rtl_block_empty_p): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(rtl_can_remove_branch_p): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(cfg_layout_split_edge): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(rtl_flow_call_edges_add): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(cfg_layout_can_merge_blocks_p): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(cfg_layout_redirect_edge_and_branch): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(fixup_fallthru_exit_predecessor): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(fixup_reorder_chain): Likewise.
|
||||
(relink_block_chain): Likewise.
|
||||
(cfg_layout_delete_block): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(rtl_verify_bb_layout): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(cfg_layout_duplicate_bb): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(force_one_exit_fallthru): Likewise.
|
||||
(rtl_verify_fallthru): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(rtl_verify_edges): Likewise.
|
||||
(commit_edge_insertions): Likewise.
|
||||
(commit_one_edge_insertion): Likewise.
|
||||
(rtl_split_edge): Likewise.
|
||||
(force_nonfallthru_and_redirect): Likewise.
|
||||
(outof_cfg_layout_mode): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(skip_insns_after_block): Likewise.
|
||||
(fixup_partition_crossing): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(purge_dead_edges): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(rtl_can_merge_blocks): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(contains_no_active_insn_p): Likewise.
|
||||
(emit_insn_at_entry): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(entry_of_function): Likewise.
|
||||
(last_bb_in_partition): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(fixup_new_cold_bb): Likewise.
|
||||
(patch_jump_insn): Likewise.
|
||||
(try_redirect_by_replacing_jump): Likewise.
|
||||
(block_label): Likewise.
|
||||
(could_fall_through): Likewise.
|
||||
(can_fallthru): Likewise.
|
||||
* cgraphbuild.c (cgraph_rebuild_references): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
(rebuild_cgraph_edges): Likewise.
|
||||
* cgraphunit.c (init_lowered_empty_function): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(expand_thunk): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* combine.c (get_last_value): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(distribute_links): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(get_last_value_validate): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(try_combine): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(reg_num_sign_bit_copies_for_combine): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(reg_nonzero_bits_for_combine): Likewise.
|
||||
(set_nonzero_bits_and_sign_copies): Likewise.
|
||||
(combine_instructions): Likewise.
|
||||
* cprop.c (one_cprop_pass): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(bypass_conditional_jumps): Likewise.
|
||||
(bypass_block): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(find_implicit_sets): Likewise.
|
||||
(cprop_jump): Likewise.
|
||||
* cse.c (cse_cc_succs): Likewise.
|
||||
(cse_find_path): Likewise.
|
||||
* df-problems.c (df_lr_confluence_0): Likewise.
|
||||
* df-scan.c (df_entry_block_defs_collect): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
(df_exit_block_uses_collect): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* dominance.c (iterate_fix_dominators): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
(calc_idoms): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(determine_dominators_for_sons): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(calc_dfs_tree): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(prune_bbs_to_update_dominators): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(calc_dfs_tree_nonrec): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
* domwalk.c (cmp_bb_postorder): Likewise.
|
||||
* dse.c (dse_step1): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* except.c (finish_eh_generation): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(sjlj_emit_function_enter): Likewise.
|
||||
* final.c (compute_alignments): Likewise.
|
||||
* function.c (thread_prologue_and_epilogue_insns): Remove uses of
|
||||
macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(reposition_prologue_and_epilogue_notes): Remove usage of
|
||||
EXIT_BLOCK_PTR macro.
|
||||
(convert_jumps_to_returns): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(regno_clobbered_at_setjmp): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(next_block_for_reg): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* gcse.c (hoist_code): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(update_bb_reg_pressure): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(compute_code_hoist_vbeinout): Likewise.
|
||||
(should_hoist_expr_to_dom): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(pre_expr_reaches_here_p_work): Likewise.
|
||||
* gimple-iterator.c (gsi_commit_edge_inserts): Likewise.
|
||||
(gimple_find_edge_insert_loc): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
* gimple-ssa-strength-reduction.c (slsr_process_phi): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
* graph.c (draw_cfg_nodes_for_loop): Remove usage of EXIT_BLOCK_PTR
|
||||
macro.
|
||||
* graphite-clast-to-gimple.c (translate_clast_user): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
* graphite-scop-detection.c (build_scops): Likewise.
|
||||
(create_sese_edges): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(scopdet_basic_block_info): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
* haifa-sched.c (restore_bb_notes): Remove usage of EXIT_BLOCK_PTR
|
||||
macro.
|
||||
(unlink_bb_notes): Likewise.
|
||||
(create_check_block_twin): Likewise.
|
||||
(init_before_recovery): Likewise.
|
||||
(sched_extend_bb): Likewise.
|
||||
(priority): Likewise.
|
||||
* hw-doloop.c (reorder_loops): Likewise.
|
||||
(discover_loop): Likewise.
|
||||
* ifcvt.c (dead_or_predicable): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(find_if_case_1): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(block_has_only_trap): Likewise.
|
||||
(cond_exec_find_if_block): Likewise.
|
||||
(merge_if_block): Likewise.
|
||||
* ipa-inline-analysis.c (param_change_prob): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
(record_modified): Likewise.
|
||||
* ipa-pure-const.c (execute_warn_function_noreturn): Remove usage of
|
||||
EXIT_BLOCK_PTR macro.
|
||||
(local_pure_const): Likewise.
|
||||
* ipa-split.c (split_function): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(find_split_points): Likewise.
|
||||
(consider_split): Likewise.
|
||||
(find_return_bb): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(verify_non_ssa_vars): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
* ira-build.c (ira_loop_tree_body_rev_postorder): Likewise.
|
||||
* ira-color.c (print_loop_title): Remove usage of EXIT_BLOCK_PTR
|
||||
macro.
|
||||
* ira-emit.c (entered_from_non_parent_p): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
(ira_emit): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* ira-int.h (ira_assert): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
* ira.c (split_live_ranges_for_shrink_wrap): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
* lcm.c (compute_rev_insert_delete): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(compute_nearerout): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(compute_farthest): Likewise.
|
||||
(compute_available): Likewise.
|
||||
(compute_insert_delete): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(compute_laterin): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(compute_earliest): Likewise.
|
||||
(compute_antinout_edge): Likewise.
|
||||
* loop-iv.c (simplify_using_initial_values): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
* loop-unswitch.c (unswitch_loop): Remove usage of EXIT_BLOCK_PTR
|
||||
macro.
|
||||
* lra-assigns.c (find_hard_regno_for): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
* lra-constraints.c (lra_inheritance): Remove usage of EXIT_BLOCK_PTR
|
||||
macro.
|
||||
* lra-lives.c (lra_create_live_ranges): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
* lra.c (has_nonexceptional_receiver): Remove usage of EXIT_BLOCK_PTR
|
||||
macro.
|
||||
* lto-streamer-in.c (input_function): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
* lto-streamer-out.c (output_cfg): Likewise.
|
||||
* mcf.c (adjust_cfg_counts): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(create_fixup_graph): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
* mode-switching.c (optimize_mode_switching): Likewise.
|
||||
(create_pre_exit): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* modulo-sched.c (rest_of_handle_sms): Likewise.
|
||||
(canon_loop): Likewise.
|
||||
* omp-low.c (build_omp_regions): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
* postreload-gcse.c (eliminate_partially_redundant_loads): Remove uses
|
||||
of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
* predict.c (rebuild_frequencies): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(propagate_freq): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(estimate_bb_frequencies): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(tree_estimate_probability_bb): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(expensive_function_p): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(tree_bb_level_predictions): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(counts_to_freqs): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(apply_return_prediction): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(estimate_loops): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(gimple_predict_edge): Likewise.
|
||||
(probably_never_executed): Likewise.
|
||||
* profile.c (find_spanning_tree): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(branch_prob): Likewise.
|
||||
(compute_branch_probabilities): Likewise.
|
||||
(compute_frequency_overlap): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(is_inconsistent): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(read_profile_edge_counts): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(set_bb_counts): Likewise.
|
||||
(correct_negative_edge_counts): Likewise.
|
||||
(get_exec_counts): Likewise.
|
||||
(instrument_values): Likewise.
|
||||
(instrument_edges): Likewise.
|
||||
* reg-stack.c (convert_regs): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(compensate_edges): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(convert_regs_exit): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(convert_regs_entry): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(reg_to_stack): Likewise.
|
||||
* regs.h (REG_N_SETS): Likewise.
|
||||
* reload.c (find_dummy_reload): Likewise.
|
||||
(combine_reloads): Likewise.
|
||||
(push_reload): Likewise.
|
||||
* reload1.c (has_nonexceptional_receiver): Remove usage of
|
||||
EXIT_BLOCK_PTR macro.
|
||||
* resource.c (mark_target_live_regs): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(find_basic_block): Likewise.
|
||||
* sched-ebb.c (ebb_add_block): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(schedule_ebbs): Likewise.
|
||||
* sched-int.h (sel_sched_p): Likewise.
|
||||
* sched-rgn.c (compute_dom_prob_ps): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(rgn_add_block): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(haifa_find_rgns): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(propagate_deps): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(extend_rgns): Likewise.
|
||||
(find_single_block_region): Likewise.
|
||||
* sel-sched-ir.c (sel_remove_loop_preheader): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
(setup_nop_and_exit_insns): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(sel_create_recovery_block): Likewise.
|
||||
(bb_ends_ebb_p): Likewise.
|
||||
(sel_bb_end): Likewise.
|
||||
(sel_bb_head): Likewise.
|
||||
(free_lv_sets): Likewise.
|
||||
(init_lv_sets): Likewise.
|
||||
(tidy_control_flow): Likewise.
|
||||
(maybe_tidy_empty_bb): Likewise.
|
||||
* sel-sched-ir.h (_succ_iter_cond): Likewise.
|
||||
(_succ_iter_start): Likewise.
|
||||
(sel_bb_empty_or_nop_p): Likewise.
|
||||
(get_loop_exit_edges_unique_dests): Likewise.
|
||||
(inner_loop_header_p): Likewise.
|
||||
* sel-sched.c (create_block_for_bookkeeping): Likewise.
|
||||
(find_block_for_bookkeeping): Likewise.
|
||||
* store-motion.c (remove_reachable_equiv_notes): Likewise.
|
||||
(insert_store): Likewise.
|
||||
* trans-mem.c (ipa_tm_transform_clone): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
(tm_memopt_compute_available): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(ipa_tm_scan_irr_function): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(gate_tm_init): Likewise.
|
||||
(tm_region_init): Likewise.
|
||||
* tree-cfg.c (execute_fixup_cfg): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(execute_warn_function_return): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(split_critical_edges): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(print_loops): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(move_sese_region_to_fn): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(gimple_redirect_edge_and_branch): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(gimple_verify_flow_info): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(remove_edge_and_dominated_blocks): Remove usage of EXIT_BLOCK_PTR
|
||||
macro.
|
||||
(make_edges): Remove uses of macros: ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(gimple_flow_call_edges_add): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(make_blocks): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(build_gimple_cfg): Likewise.
|
||||
(gimple_duplicate_bb): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(gimple_can_merge_blocks_p): Likewise.
|
||||
* tree-cfgcleanup.c (tree_forwarder_block_p): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
* tree-complex.c (update_parameter_components): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
* tree-if-conv.c (get_loop_body_in_if_conv_order): Remove usage of
|
||||
EXIT_BLOCK_PTR macro.
|
||||
* tree-inline.c (tree_function_versioning): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(delete_unreachable_blocks_update_callgraph): Likewise.
|
||||
(initialize_cfun): Likewise.
|
||||
(copy_cfg_body): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(copy_edges_for_bb): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(remap_ssa_name): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
* tree-into-ssa.c (update_ssa): Likewise.
|
||||
(maybe_register_def): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(insert_updated_phi_nodes_for): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(rewrite_into_ssa): Likewise.
|
||||
(rewrite_debug_stmt_uses): Likewise.
|
||||
* tree-outof-ssa.c (expand_phi_nodes): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
* tree-profile.c (gimple_gen_ic_func_profiler): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
* tree-scalar-evolution.h (block_before_loop): Likewise.
|
||||
* tree-sra.c (sra_ipa_reset_debug_stmts): Likewise.
|
||||
(dump_dereferences_table): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(analyze_caller_dereference_legality): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(propagate_dereference_distances): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(initialize_parameter_reductions): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
* tree-ssa-ccp.c (gsi_prev_dom_bb_nondebug): Likewise.
|
||||
(optimize_stack_restore): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* tree-ssa-coalesce.c (create_outofssa_var_map): Likewise.
|
||||
* tree-ssa-dce.c (eliminate_unnecessary_stmts): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(remove_dead_stmt): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(propagate_necessity): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(mark_control_dependent_edges_necessary): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
* tree-ssa-dom.c (eliminate_degenerate_phis): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
(tree_ssa_dominator_optimize): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* tree-ssa-live.c (verify_live_on_entry): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(calculate_live_on_exit): Likewise.
|
||||
(set_var_live_on_entry): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(loe_visit_block): Likewise.
|
||||
* tree-ssa-live.h (live_on_exit): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(live_on_entry): Likewise.
|
||||
* tree-ssa-loop-ivopts.c (find_interesting_uses): Remove usage of
|
||||
EXIT_BLOCK_PTR macro.
|
||||
* tree-ssa-loop-manip.c (compute_live_loop_exits): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
* tree-ssa-loop-niter.c (simplify_using_initial_conditions): Likewise.
|
||||
(bound_difference): Likewise.
|
||||
* tree-ssa-loop-prefetch.c (may_use_storent_in_loop_p): Remove usage
|
||||
of EXIT_BLOCK_PTR macro.
|
||||
* tree-ssa-loop-unswitch.c (simplify_using_entry_checks): Remove usage
|
||||
of ENTRY_BLOCK_PTR macro.
|
||||
* tree-ssa-math-opts.c (register_division_in): Likewise.
|
||||
* tree-ssa-phiprop.c (tree_ssa_phiprop): Likewise.
|
||||
* tree-ssa-pre.c (compute_avail): Likewise.
|
||||
(compute_antic): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(insert): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
* tree-ssa-propagate.c (ssa_prop_init): Likewise.
|
||||
(simulate_block): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(cfg_blocks_add): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
(add_control_edge): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* tree-ssa-reassoc.c (do_reassoc): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(build_and_add_sum): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
* tree-ssa-sink.c (nearest_common_dominator_of_uses): Likewise.
|
||||
(execute_sink_code): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* tree-ssa-uninit.c (find_dom): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(compute_control_dep_chain): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(find_pdom): Likewise.
|
||||
(warn_uninitialized_vars): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
* tree-stdarg.c (reachable_at_most_once): Likewise.
|
||||
* tree-tailcall.c (tree_optimize_tail_calls_1): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(eliminate_tail_call): Likewise.
|
||||
* tsan.c (instrument_func_entry): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
(instrument_func_exit): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* var-tracking.c (vt_initialize): Remove uses of macros:
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR.
|
||||
(vt_add_function_parameter): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(vt_find_locations): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(vt_stack_adjustments): Remove uses of macros: ENTRY_BLOCK_PTR,
|
||||
EXIT_BLOCK_PTR.
|
||||
* varasm.c (assemble_start_function): Remove usage of ENTRY_BLOCK_PTR
|
||||
macro.
|
||||
* config/bfin/bfin.c (hwloop_optimize): Likewise.
|
||||
* config/nds32/nds32.c (nds32_fp_as_gp_check_available): Remove usage
|
||||
of EXIT_BLOCK_PTR macro.
|
||||
* config/arm/arm.c (require_pic_register): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
(arm_r3_live_at_start_p): Likewise.
|
||||
(any_sibcall_could_use_r3): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
* config/rs6000/rs6000.c (rs6000_emit_prologue): Likewise.
|
||||
* config/frv/frv.c (frv_optimize_membar_global): Likewise.
|
||||
* config/alpha/alpha.c (alpha_gp_save_rtx): Remove usage of
|
||||
ENTRY_BLOCK_PTR macro.
|
||||
* config/i386/i386.c (ix86_count_insn): Likewise.
|
||||
(ix86_seh_fixup_eh_fallthru): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(ix86_pad_short_function): Likewise.
|
||||
(ix86_compute_frame_layout): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(ix86_pad_returns): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(ix86_eax_live_at_start_p): Remove usage of ENTRY_BLOCK_PTR macro.
|
||||
(add_condition_to_bb): Remove usage of EXIT_BLOCK_PTR macro.
|
||||
(ix86_expand_epilogue): Likewise.
|
||||
* config/ia64/ia64.c (ia64_asm_unwind_emit): Likewise.
|
||||
(ia64_expand_prologue): Likewise.
|
||||
|
||||
2013-11-19 Catherine Moore <clm@codesourcery.com>
|
||||
|
||||
* doc/invoke.texi (mfix-rm7000, mno-fix-rm7000): Document.
|
||||
|
|
|
@ -312,8 +312,8 @@ struct GTY(()) control_flow_graph {
|
|||
};
|
||||
|
||||
/* Defines for accessing the fields of the CFG structure for function FN. */
|
||||
#define ENTRY_BLOCK_PTR_FOR_FUNCTION(FN) ((FN)->cfg->x_entry_block_ptr)
|
||||
#define EXIT_BLOCK_PTR_FOR_FUNCTION(FN) ((FN)->cfg->x_exit_block_ptr)
|
||||
#define ENTRY_BLOCK_PTR_FOR_FN(FN) ((FN)->cfg->x_entry_block_ptr)
|
||||
#define EXIT_BLOCK_PTR_FOR_FN(FN) ((FN)->cfg->x_exit_block_ptr)
|
||||
#define basic_block_info_for_function(FN) ((FN)->cfg->x_basic_block_info)
|
||||
#define n_basic_blocks_for_fn(FN) ((FN)->cfg->x_n_basic_blocks)
|
||||
#define n_edges_for_fn(FN) ((FN)->cfg->x_n_edges)
|
||||
|
@ -327,8 +327,6 @@ struct GTY(()) control_flow_graph {
|
|||
((*basic_block_info_for_function (FN))[(N)] = (BB))
|
||||
|
||||
/* Defines for textual backward source compatibility. */
|
||||
#define ENTRY_BLOCK_PTR (cfun->cfg->x_entry_block_ptr)
|
||||
#define EXIT_BLOCK_PTR (cfun->cfg->x_exit_block_ptr)
|
||||
#define basic_block_info (cfun->cfg->x_basic_block_info)
|
||||
#define last_basic_block (cfun->cfg->x_last_basic_block)
|
||||
#define label_to_block_map (cfun->cfg->x_label_to_block_map)
|
||||
|
@ -378,10 +376,10 @@ struct GTY(()) control_flow_graph {
|
|||
exit block). */
|
||||
|
||||
#define FOR_ALL_BB(BB) \
|
||||
for (BB = ENTRY_BLOCK_PTR; BB; BB = BB->next_bb)
|
||||
for (BB = ENTRY_BLOCK_PTR_FOR_FN (cfun); BB; BB = BB->next_bb)
|
||||
|
||||
#define FOR_ALL_BB_FN(BB, FN) \
|
||||
for (BB = ENTRY_BLOCK_PTR_FOR_FUNCTION (FN); BB; BB = BB->next_bb)
|
||||
for (BB = ENTRY_BLOCK_PTR_FOR_FN (FN); BB; BB = BB->next_bb)
|
||||
|
||||
|
||||
/* Stuff for recording basic block info. */
|
||||
|
|
|
@ -275,7 +275,7 @@ find_traces (int *n_traces, struct trace *traces)
|
|||
heap = fibheap_new ();
|
||||
max_entry_frequency = 0;
|
||||
max_entry_count = 0;
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
{
|
||||
bbd[e->dest->index].heap = heap;
|
||||
bbd[e->dest->index].node = fibheap_insert (heap, bb_to_key (e->dest),
|
||||
|
@ -348,7 +348,7 @@ rotate_loop (edge back_edge, struct trace *trace, int trace_n)
|
|||
edge_iterator ei;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& bb_visited_trace (e->dest) != trace_n
|
||||
&& (e->flags & EDGE_CAN_FALLTHRU)
|
||||
&& !(e->flags & EDGE_COMPLEX))
|
||||
|
@ -524,7 +524,7 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
|
|||
{
|
||||
gcc_assert (!(e->flags & EDGE_FAKE));
|
||||
|
||||
if (e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
if (bb_visited_trace (e->dest)
|
||||
|
@ -605,7 +605,7 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
|
|||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
{
|
||||
if (e == best_edge
|
||||
|| e->dest == EXIT_BLOCK_PTR
|
||||
|| e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| bb_visited_trace (e->dest))
|
||||
continue;
|
||||
|
||||
|
@ -680,7 +680,8 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
|
|||
header is not the first block of the function
|
||||
we can rotate the loop. */
|
||||
|
||||
if (best_edge->dest != ENTRY_BLOCK_PTR->next_bb)
|
||||
if (best_edge->dest
|
||||
!= ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
|
||||
{
|
||||
if (dump_file)
|
||||
{
|
||||
|
@ -776,7 +777,7 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
|
|||
is an end of the trace). */
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
{
|
||||
if (e->dest == EXIT_BLOCK_PTR
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| bb_visited_trace (e->dest))
|
||||
continue;
|
||||
|
||||
|
@ -885,7 +886,8 @@ bb_to_key (basic_block bb)
|
|||
or whose predecessor edge is EDGE_DFS_BACK. */
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
{
|
||||
if ((e->src != ENTRY_BLOCK_PTR && bbd[e->src->index].end_of_trace >= 0)
|
||||
if ((e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& bbd[e->src->index].end_of_trace >= 0)
|
||||
|| (e->flags & EDGE_DFS_BACK))
|
||||
{
|
||||
int edge_freq = EDGE_FREQUENCY (e);
|
||||
|
@ -1098,7 +1100,7 @@ connect_traces (int n_traces, struct trace *traces)
|
|||
{
|
||||
int si = e->src->index;
|
||||
|
||||
if (e->src != ENTRY_BLOCK_PTR
|
||||
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& (e->flags & EDGE_CAN_FALLTHRU)
|
||||
&& !(e->flags & EDGE_COMPLEX)
|
||||
&& bbd[si].end_of_trace >= 0
|
||||
|
@ -1141,7 +1143,7 @@ connect_traces (int n_traces, struct trace *traces)
|
|||
{
|
||||
int di = e->dest->index;
|
||||
|
||||
if (e->dest != EXIT_BLOCK_PTR
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& (e->flags & EDGE_CAN_FALLTHRU)
|
||||
&& !(e->flags & EDGE_COMPLEX)
|
||||
&& bbd[di].start_of_trace >= 0
|
||||
|
@ -1212,7 +1214,7 @@ connect_traces (int n_traces, struct trace *traces)
|
|||
bool try_copy = false;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, traces[t].last->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& (e->flags & EDGE_CAN_FALLTHRU)
|
||||
&& !(e->flags & EDGE_COMPLEX)
|
||||
&& (!best || e->probability > best->probability))
|
||||
|
@ -1237,7 +1239,7 @@ connect_traces (int n_traces, struct trace *traces)
|
|||
{
|
||||
int di = e2->dest->index;
|
||||
|
||||
if (e2->dest == EXIT_BLOCK_PTR
|
||||
if (e2->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| ((e2->flags & EDGE_CAN_FALLTHRU)
|
||||
&& !(e2->flags & EDGE_COMPLEX)
|
||||
&& bbd[di].start_of_trace >= 0
|
||||
|
@ -1253,7 +1255,7 @@ connect_traces (int n_traces, struct trace *traces)
|
|||
{
|
||||
best = e;
|
||||
best2 = e2;
|
||||
if (e2->dest != EXIT_BLOCK_PTR)
|
||||
if (e2->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
best2_len = traces[bbd[di].start_of_trace].length;
|
||||
else
|
||||
best2_len = INT_MAX;
|
||||
|
@ -1282,7 +1284,7 @@ connect_traces (int n_traces, struct trace *traces)
|
|||
traces[t].last->index, best->dest->index);
|
||||
if (!next_bb)
|
||||
fputc ('\n', dump_file);
|
||||
else if (next_bb == EXIT_BLOCK_PTR)
|
||||
else if (next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
fprintf (dump_file, "exit\n");
|
||||
else
|
||||
fprintf (dump_file, "%d\n", next_bb->index);
|
||||
|
@ -1290,7 +1292,7 @@ connect_traces (int n_traces, struct trace *traces)
|
|||
|
||||
new_bb = copy_bb (best->dest, best, traces[t].last, t);
|
||||
traces[t].last = new_bb;
|
||||
if (next_bb && next_bb != EXIT_BLOCK_PTR)
|
||||
if (next_bb && next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
t = bbd[next_bb->index].start_of_trace;
|
||||
traces[last_trace].last->aux = traces[t].first;
|
||||
|
@ -1413,7 +1415,7 @@ fix_up_crossing_landing_pad (eh_landing_pad old_lp, basic_block old_bb)
|
|||
JUMP_LABEL (jump) = post_label;
|
||||
|
||||
/* Create new basic block to be dest for lp. */
|
||||
last_bb = EXIT_BLOCK_PTR->prev_bb;
|
||||
last_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
|
||||
new_bb = create_basic_block (new_label, jump, last_bb);
|
||||
new_bb->aux = last_bb->aux;
|
||||
last_bb->aux = new_bb;
|
||||
|
@ -1663,8 +1665,8 @@ find_rarely_executed_basic_blocks_and_crossing_edges (void)
|
|||
/* We should never have EDGE_CROSSING set yet. */
|
||||
gcc_checking_assert ((flags & EDGE_CROSSING) == 0);
|
||||
|
||||
if (e->src != ENTRY_BLOCK_PTR
|
||||
&& e->dest != EXIT_BLOCK_PTR
|
||||
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& BB_PARTITION (e->src) != BB_PARTITION (e->dest))
|
||||
{
|
||||
crossing_edges.safe_push (e);
|
||||
|
@ -1731,14 +1733,14 @@ add_labels_and_missing_jumps (vec<edge> crossing_edges)
|
|||
basic_block dest = e->dest;
|
||||
rtx label, new_jump;
|
||||
|
||||
if (dest == EXIT_BLOCK_PTR)
|
||||
if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
/* Make sure dest has a label. */
|
||||
label = block_label (dest);
|
||||
|
||||
/* Nothing to do for non-fallthru edges. */
|
||||
if (src == ENTRY_BLOCK_PTR)
|
||||
if (src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
if ((e->flags & EDGE_FALLTHRU) == 0)
|
||||
continue;
|
||||
|
@ -1832,7 +1834,7 @@ fix_up_fall_thru_edges (void)
|
|||
}
|
||||
}
|
||||
|
||||
if (fall_thru && (fall_thru->dest != EXIT_BLOCK_PTR))
|
||||
if (fall_thru && (fall_thru->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
|
||||
{
|
||||
/* Check to see if the fall-thru edge is a crossing edge. */
|
||||
|
||||
|
@ -2066,7 +2068,7 @@ fix_crossing_conditional_branches (void)
|
|||
new_jump = emit_jump_insn (gen_jump (old_label));
|
||||
JUMP_LABEL (new_jump) = old_label;
|
||||
|
||||
last_bb = EXIT_BLOCK_PTR->prev_bb;
|
||||
last_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
|
||||
new_bb = create_basic_block (new_label, new_jump, last_bb);
|
||||
new_bb->aux = last_bb->aux;
|
||||
last_bb->aux = new_bb;
|
||||
|
@ -2319,7 +2321,7 @@ rest_of_handle_reorder_blocks (void)
|
|||
cleanup_cfg (CLEANUP_EXPENSIVE);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bb->aux = bb->next_bb;
|
||||
cfg_layout_finalize ();
|
||||
|
||||
|
@ -2415,7 +2417,7 @@ duplicate_computed_gotos (void)
|
|||
int size, all_flags;
|
||||
|
||||
/* Build the reorder chain for the original order of blocks. */
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bb->aux = bb->next_bb;
|
||||
|
||||
/* Obviously the block has to end in a computed jump. */
|
||||
|
@ -2465,7 +2467,7 @@ duplicate_computed_gotos (void)
|
|||
the exit block or the next block.
|
||||
The destination must have more than one predecessor. */
|
||||
if (!single_succ_p (bb)
|
||||
|| single_succ (bb) == EXIT_BLOCK_PTR
|
||||
|| single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| single_succ (bb) == bb->next_bb
|
||||
|| single_pred_p (single_succ (bb)))
|
||||
continue;
|
||||
|
|
|
@ -1328,7 +1328,8 @@ migrate_btr_def (btr_def def, int min_cost)
|
|||
def_basic_block_freq = basic_block_freq (def->bb);
|
||||
|
||||
for (attempt = get_immediate_dominator (CDI_DOMINATORS, def->bb);
|
||||
!give_up && attempt && attempt != ENTRY_BLOCK_PTR && def->cost >= min_cost;
|
||||
!give_up && attempt && attempt != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& def->cost >= min_cost;
|
||||
attempt = get_immediate_dominator (CDI_DOMINATORS, attempt))
|
||||
{
|
||||
/* Try to move the instruction that sets the target register into
|
||||
|
|
42
gcc/cfg.c
42
gcc/cfg.c
|
@ -70,16 +70,16 @@ init_flow (struct function *the_fun)
|
|||
if (!the_fun->cfg)
|
||||
the_fun->cfg = ggc_alloc_cleared_control_flow_graph ();
|
||||
n_edges_for_fn (the_fun) = 0;
|
||||
ENTRY_BLOCK_PTR_FOR_FUNCTION (the_fun)
|
||||
ENTRY_BLOCK_PTR_FOR_FN (the_fun)
|
||||
= ggc_alloc_cleared_basic_block_def ();
|
||||
ENTRY_BLOCK_PTR_FOR_FUNCTION (the_fun)->index = ENTRY_BLOCK;
|
||||
EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun)
|
||||
ENTRY_BLOCK_PTR_FOR_FN (the_fun)->index = ENTRY_BLOCK;
|
||||
EXIT_BLOCK_PTR_FOR_FN (the_fun)
|
||||
= ggc_alloc_cleared_basic_block_def ();
|
||||
EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun)->index = EXIT_BLOCK;
|
||||
ENTRY_BLOCK_PTR_FOR_FUNCTION (the_fun)->next_bb
|
||||
= EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun);
|
||||
EXIT_BLOCK_PTR_FOR_FUNCTION (the_fun)->prev_bb
|
||||
= ENTRY_BLOCK_PTR_FOR_FUNCTION (the_fun);
|
||||
EXIT_BLOCK_PTR_FOR_FN (the_fun)->index = EXIT_BLOCK;
|
||||
ENTRY_BLOCK_PTR_FOR_FN (the_fun)->next_bb
|
||||
= EXIT_BLOCK_PTR_FOR_FN (the_fun);
|
||||
EXIT_BLOCK_PTR_FOR_FN (the_fun)->prev_bb
|
||||
= ENTRY_BLOCK_PTR_FOR_FN (the_fun);
|
||||
}
|
||||
|
||||
/* Helper function for remove_edge and clear_edges. Frees edge structure
|
||||
|
@ -109,10 +109,10 @@ clear_edges (void)
|
|||
vec_safe_truncate (bb->preds, 0);
|
||||
}
|
||||
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
free_edge (e);
|
||||
vec_safe_truncate (EXIT_BLOCK_PTR->preds, 0);
|
||||
vec_safe_truncate (ENTRY_BLOCK_PTR->succs, 0);
|
||||
vec_safe_truncate (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds, 0);
|
||||
vec_safe_truncate (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs, 0);
|
||||
|
||||
gcc_assert (!n_edges_for_fn (cfun));
|
||||
}
|
||||
|
@ -153,8 +153,8 @@ compact_blocks (void)
|
|||
{
|
||||
int i;
|
||||
|
||||
SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
|
||||
SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);
|
||||
SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
if (df)
|
||||
df_compact_blocks ();
|
||||
|
@ -282,8 +282,8 @@ edge
|
|||
cached_make_edge (sbitmap edge_cache, basic_block src, basic_block dst, int flags)
|
||||
{
|
||||
if (edge_cache == NULL
|
||||
|| src == ENTRY_BLOCK_PTR
|
||||
|| dst == EXIT_BLOCK_PTR)
|
||||
|| src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| dst == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return make_edge (src, dst, flags);
|
||||
|
||||
/* Does the requested edge already exist? */
|
||||
|
@ -387,7 +387,7 @@ clear_bb_flags (void)
|
|||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
bb->flags &= BB_FLAGS_TO_PRESERVE;
|
||||
}
|
||||
|
||||
|
@ -411,7 +411,7 @@ check_bb_profile (basic_block bb, FILE * file, int indent, int flags)
|
|||
if (profile_status_for_function (fun) == PROFILE_ABSENT)
|
||||
return;
|
||||
|
||||
if (bb != EXIT_BLOCK_PTR_FOR_FUNCTION (fun))
|
||||
if (bb != EXIT_BLOCK_PTR_FOR_FN (fun))
|
||||
{
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
sum += e->probability;
|
||||
|
@ -428,7 +428,7 @@ check_bb_profile (basic_block bb, FILE * file, int indent, int flags)
|
|||
(flags & TDF_COMMENT) ? ";; " : "", s_indent,
|
||||
(int) lsum, (int) bb->count);
|
||||
}
|
||||
if (bb != ENTRY_BLOCK_PTR_FOR_FUNCTION (fun))
|
||||
if (bb != ENTRY_BLOCK_PTR_FOR_FN (fun))
|
||||
{
|
||||
sum = 0;
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
|
@ -641,7 +641,8 @@ alloc_aux_for_edges (int size)
|
|||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -660,7 +661,8 @@ clear_aux_for_edges (void)
|
|||
basic_block bb;
|
||||
edge e;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
edge_iterator ei;
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
|
|
110
gcc/cfganal.c
110
gcc/cfganal.c
|
@ -86,7 +86,7 @@ mark_dfs_back_edges (void)
|
|||
bitmap_clear (visited);
|
||||
|
||||
/* Push the first edge on to the stack. */
|
||||
stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
|
||||
stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
|
||||
|
||||
while (sp)
|
||||
{
|
||||
|
@ -101,7 +101,8 @@ mark_dfs_back_edges (void)
|
|||
ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
|
||||
|
||||
/* Check if the edge destination has been visited yet. */
|
||||
if (dest != EXIT_BLOCK_PTR && ! bitmap_bit_p (visited, dest->index))
|
||||
if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && ! bitmap_bit_p (visited,
|
||||
dest->index))
|
||||
{
|
||||
/* Mark that we have visited the destination. */
|
||||
bitmap_set_bit (visited, dest->index);
|
||||
|
@ -118,12 +119,14 @@ mark_dfs_back_edges (void)
|
|||
}
|
||||
else
|
||||
{
|
||||
if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
|
||||
if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& pre[src->index] >= pre[dest->index]
|
||||
&& post[dest->index] == 0)
|
||||
ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;
|
||||
|
||||
if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
|
||||
if (ei_one_before_end_p (ei)
|
||||
&& src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
post[src->index] = postnum++;
|
||||
|
||||
if (!ei_one_before_end_p (ei))
|
||||
|
@ -163,7 +166,7 @@ find_unreachable_blocks (void)
|
|||
be only one. It isn't inconceivable that we might one day directly
|
||||
support Fortran alternate entry points. */
|
||||
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
{
|
||||
*tos++ = e->dest;
|
||||
|
||||
|
@ -217,7 +220,8 @@ create_edge_list (void)
|
|||
/* Determine the number of edges in the flow graph by counting successor
|
||||
edges on each basic block. */
|
||||
num_edges = 0;
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
num_edges += EDGE_COUNT (bb->succs);
|
||||
}
|
||||
|
@ -229,7 +233,8 @@ create_edge_list (void)
|
|||
num_edges = 0;
|
||||
|
||||
/* Follow successors of blocks, and register these edges. */
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
elist->index_to_edge[num_edges++] = e;
|
||||
|
||||
|
@ -261,12 +266,12 @@ print_edge_list (FILE *f, struct edge_list *elist)
|
|||
for (x = 0; x < elist->num_edges; x++)
|
||||
{
|
||||
fprintf (f, " %-4d - edge(", x);
|
||||
if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
|
||||
if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
fprintf (f, "entry,");
|
||||
else
|
||||
fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
|
||||
|
||||
if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
|
||||
if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
fprintf (f, "exit)\n");
|
||||
else
|
||||
fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
|
||||
|
@ -285,7 +290,8 @@ verify_edge_list (FILE *f, struct edge_list *elist)
|
|||
basic_block bb, p, s;
|
||||
edge_iterator ei;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
{
|
||||
|
@ -310,8 +316,9 @@ verify_edge_list (FILE *f, struct edge_list *elist)
|
|||
/* We've verified that all the edges are in the list, now lets make sure
|
||||
there are no spurious edges in the list. This is an expensive check! */
|
||||
|
||||
FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
|
||||
{
|
||||
int found_edge = 0;
|
||||
|
||||
|
@ -348,9 +355,9 @@ void
|
|||
control_dependences::set_control_dependence_map_bit (basic_block bb,
|
||||
int edge_index)
|
||||
{
|
||||
if (bb == ENTRY_BLOCK_PTR)
|
||||
if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
return;
|
||||
gcc_assert (bb != EXIT_BLOCK_PTR);
|
||||
gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
bitmap_set_bit (control_dependence_map[bb->index], edge_index);
|
||||
}
|
||||
|
||||
|
@ -367,15 +374,15 @@ control_dependences::clear_control_dependence_bitmap (basic_block bb)
|
|||
static inline basic_block
|
||||
find_pdom (basic_block block)
|
||||
{
|
||||
gcc_assert (block != ENTRY_BLOCK_PTR);
|
||||
gcc_assert (block != ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
if (block == EXIT_BLOCK_PTR)
|
||||
return EXIT_BLOCK_PTR;
|
||||
if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
else
|
||||
{
|
||||
basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
|
||||
if (! bb)
|
||||
return EXIT_BLOCK_PTR;
|
||||
return EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
return bb;
|
||||
}
|
||||
}
|
||||
|
@ -389,15 +396,17 @@ control_dependences::find_control_dependence (int edge_index)
|
|||
basic_block current_block;
|
||||
basic_block ending_block;
|
||||
|
||||
gcc_assert (INDEX_EDGE_PRED_BB (m_el, edge_index) != EXIT_BLOCK_PTR);
|
||||
gcc_assert (INDEX_EDGE_PRED_BB (m_el, edge_index)
|
||||
!= EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
if (INDEX_EDGE_PRED_BB (m_el, edge_index) == ENTRY_BLOCK_PTR)
|
||||
ending_block = single_succ (ENTRY_BLOCK_PTR);
|
||||
if (INDEX_EDGE_PRED_BB (m_el, edge_index) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
ending_block = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
else
|
||||
ending_block = find_pdom (INDEX_EDGE_PRED_BB (m_el, edge_index));
|
||||
|
||||
for (current_block = INDEX_EDGE_SUCC_BB (m_el, edge_index);
|
||||
current_block != ending_block && current_block != EXIT_BLOCK_PTR;
|
||||
current_block != ending_block
|
||||
&& current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
current_block = find_pdom (current_block))
|
||||
{
|
||||
edge e = INDEX_EDGE (m_el, edge_index);
|
||||
|
@ -523,7 +532,7 @@ remove_fake_edges (void)
|
|||
{
|
||||
basic_block bb;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
|
||||
remove_fake_predecessors (bb);
|
||||
}
|
||||
|
||||
|
@ -532,7 +541,7 @@ remove_fake_edges (void)
|
|||
void
|
||||
remove_fake_exit_edges (void)
|
||||
{
|
||||
remove_fake_predecessors (EXIT_BLOCK_PTR);
|
||||
remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
}
|
||||
|
||||
|
||||
|
@ -547,7 +556,7 @@ add_noreturn_fake_exit_edges (void)
|
|||
|
||||
FOR_EACH_BB (bb)
|
||||
if (EDGE_COUNT (bb->succs) == 0)
|
||||
make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
|
||||
make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
|
||||
}
|
||||
|
||||
/* This function adds a fake edge between any infinite loops to the
|
||||
|
@ -564,14 +573,14 @@ add_noreturn_fake_exit_edges (void)
|
|||
void
|
||||
connect_infinite_loops_to_exit (void)
|
||||
{
|
||||
basic_block unvisited_block = EXIT_BLOCK_PTR;
|
||||
basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
basic_block deadend_block;
|
||||
struct depth_first_search_dsS dfs_ds;
|
||||
|
||||
/* Perform depth-first search in the reverse graph to find nodes
|
||||
reachable from the exit block. */
|
||||
flow_dfs_compute_reverse_init (&dfs_ds);
|
||||
flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
|
||||
flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
/* Repeatedly add fake edges, updating the unreachable nodes. */
|
||||
while (1)
|
||||
|
@ -582,7 +591,7 @@ connect_infinite_loops_to_exit (void)
|
|||
break;
|
||||
|
||||
deadend_block = dfs_find_deadend (unvisited_block);
|
||||
make_edge (deadend_block, EXIT_BLOCK_PTR, EDGE_FAKE);
|
||||
make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
|
||||
flow_dfs_compute_reverse_add_bb (&dfs_ds, deadend_block);
|
||||
}
|
||||
|
||||
|
@ -619,7 +628,7 @@ post_order_compute (int *post_order, bool include_entry_exit,
|
|||
bitmap_clear (visited);
|
||||
|
||||
/* Push the first edge on to the stack. */
|
||||
stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
|
||||
stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
|
||||
|
||||
while (sp)
|
||||
{
|
||||
|
@ -633,7 +642,8 @@ post_order_compute (int *post_order, bool include_entry_exit,
|
|||
dest = ei_edge (ei)->dest;
|
||||
|
||||
/* Check if the edge destination has been visited yet. */
|
||||
if (dest != EXIT_BLOCK_PTR && ! bitmap_bit_p (visited, dest->index))
|
||||
if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& ! bitmap_bit_p (visited, dest->index))
|
||||
{
|
||||
/* Mark that we have visited the destination. */
|
||||
bitmap_set_bit (visited, dest->index);
|
||||
|
@ -647,7 +657,8 @@ post_order_compute (int *post_order, bool include_entry_exit,
|
|||
}
|
||||
else
|
||||
{
|
||||
if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
|
||||
if (ei_one_before_end_p (ei)
|
||||
&& src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
post_order[post_order_num++] = src->index;
|
||||
|
||||
if (!ei_one_before_end_p (ei))
|
||||
|
@ -671,7 +682,8 @@ post_order_compute (int *post_order, bool include_entry_exit,
|
|||
{
|
||||
basic_block b;
|
||||
basic_block next_bb;
|
||||
for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
|
||||
for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
|
||||
!= EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
|
||||
{
|
||||
next_bb = b->next_bb;
|
||||
|
||||
|
@ -813,7 +825,8 @@ inverted_post_order_compute (int *post_order)
|
|||
}
|
||||
else
|
||||
{
|
||||
if (bb != EXIT_BLOCK_PTR && ei_one_before_end_p (ei))
|
||||
if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& ei_one_before_end_p (ei))
|
||||
post_order[post_order_num++] = bb->index;
|
||||
|
||||
if (!ei_one_before_end_p (ei))
|
||||
|
@ -826,7 +839,8 @@ inverted_post_order_compute (int *post_order)
|
|||
/* Detect any infinite loop and activate the kludge.
|
||||
Note that this doesn't check EXIT_BLOCK itself
|
||||
since EXIT_BLOCK is always added after the outer do-while loop. */
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
if (!bitmap_bit_p (visited, bb->index))
|
||||
{
|
||||
has_unvisited_bb = true;
|
||||
|
@ -859,7 +873,7 @@ inverted_post_order_compute (int *post_order)
|
|||
{
|
||||
/* No blocks are reachable from EXIT at all.
|
||||
Find a dead-end from the ENTRY, and restart the iteration. */
|
||||
basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR);
|
||||
basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
gcc_assert (be != NULL);
|
||||
bitmap_set_bit (visited, be->index);
|
||||
stack[sp++] = ei_start (be->preds);
|
||||
|
@ -923,7 +937,7 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
|
|||
bitmap_clear (visited);
|
||||
|
||||
/* Push the first edge on to the stack. */
|
||||
stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->succs);
|
||||
stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs);
|
||||
|
||||
while (sp)
|
||||
{
|
||||
|
@ -937,7 +951,7 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
|
|||
dest = ei_edge (ei)->dest;
|
||||
|
||||
/* Check if the edge destination has been visited yet. */
|
||||
if (dest != EXIT_BLOCK_PTR_FOR_FUNCTION (fn)
|
||||
if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
|
||||
&& ! bitmap_bit_p (visited, dest->index))
|
||||
{
|
||||
/* Mark that we have visited the destination. */
|
||||
|
@ -960,7 +974,7 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
|
|||
else
|
||||
{
|
||||
if (ei_one_before_end_p (ei)
|
||||
&& src != ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)
|
||||
&& src != ENTRY_BLOCK_PTR_FOR_FN (fn)
|
||||
&& rev_post_order)
|
||||
/* There are no more successors for the SRC node
|
||||
so assign its reverse completion number. */
|
||||
|
@ -1230,7 +1244,7 @@ compute_dominance_frontiers_1 (bitmap_head *frontiers)
|
|||
{
|
||||
basic_block runner = p->src;
|
||||
basic_block domsb;
|
||||
if (runner == ENTRY_BLOCK_PTR)
|
||||
if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
domsb = get_immediate_dominator (CDI_DOMINATORS, b);
|
||||
|
@ -1337,7 +1351,7 @@ bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
|
|||
for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
|
||||
{
|
||||
e = EDGE_SUCC (b, ix);
|
||||
if (e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
bitmap_copy (dst, src[e->dest->index]);
|
||||
|
@ -1353,7 +1367,7 @@ bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
|
|||
SBITMAP_ELT_TYPE *p, *r;
|
||||
|
||||
e = EDGE_SUCC (b, ix);
|
||||
if (e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
p = src[e->dest->index]->elms;
|
||||
|
@ -1378,7 +1392,7 @@ bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
|
|||
for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
|
||||
{
|
||||
e = EDGE_PRED (b, ix);
|
||||
if (e->src == ENTRY_BLOCK_PTR)
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
bitmap_copy (dst, src[e->src->index]);
|
||||
|
@ -1394,7 +1408,7 @@ bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
|
|||
SBITMAP_ELT_TYPE *p, *r;
|
||||
|
||||
e = EDGE_PRED (b, ix);
|
||||
if (e->src == ENTRY_BLOCK_PTR)
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
p = src[e->src->index]->elms;
|
||||
|
@ -1419,7 +1433,7 @@ bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
|
|||
for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
|
||||
{
|
||||
e = EDGE_SUCC (b, ix);
|
||||
if (e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
bitmap_copy (dst, src[e->dest->index]);
|
||||
|
@ -1435,7 +1449,7 @@ bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
|
|||
SBITMAP_ELT_TYPE *p, *r;
|
||||
|
||||
e = EDGE_SUCC (b, ix);
|
||||
if (e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
p = src[e->dest->index]->elms;
|
||||
|
@ -1460,7 +1474,7 @@ bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
|
|||
for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
|
||||
{
|
||||
e = EDGE_PRED (b, ix);
|
||||
if (e->src== ENTRY_BLOCK_PTR)
|
||||
if (e->src== ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
bitmap_copy (dst, src[e->src->index]);
|
||||
|
@ -1476,7 +1490,7 @@ bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
|
|||
SBITMAP_ELT_TYPE *p, *r;
|
||||
|
||||
e = EDGE_PRED (b, ix);
|
||||
if (e->src == ENTRY_BLOCK_PTR)
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
p = src[e->src->index]->elms;
|
||||
|
@ -1504,7 +1518,7 @@ single_pred_before_succ_order (void)
|
|||
|
||||
bitmap_clear (visited);
|
||||
|
||||
MARK_VISITED (ENTRY_BLOCK_PTR);
|
||||
MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
FOR_EACH_BB (x)
|
||||
{
|
||||
if (VISITED_P (x))
|
||||
|
|
|
@ -213,8 +213,8 @@ make_edges (basic_block min, basic_block max, int update_p)
|
|||
|
||||
/* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
|
||||
is always the entry. */
|
||||
if (min == ENTRY_BLOCK_PTR->next_bb)
|
||||
make_edge (ENTRY_BLOCK_PTR, min, EDGE_FALLTHRU);
|
||||
if (min == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
|
||||
make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), min, EDGE_FALLTHRU);
|
||||
|
||||
FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
|
||||
{
|
||||
|
@ -233,14 +233,14 @@ make_edges (basic_block min, basic_block max, int update_p)
|
|||
if (update_p)
|
||||
{
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR)
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_set_bit (edge_cache, e->dest->index);
|
||||
}
|
||||
}
|
||||
|
||||
if (LABEL_P (BB_HEAD (bb))
|
||||
&& LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
|
||||
cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
|
||||
cached_make_edge (NULL, ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);
|
||||
|
||||
/* Examine the last instruction of the block, and discover the
|
||||
ways we can leave the block. */
|
||||
|
@ -294,7 +294,7 @@ make_edges (basic_block min, basic_block max, int update_p)
|
|||
|
||||
/* Returns create an exit out. */
|
||||
else if (returnjump_p (insn))
|
||||
cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
|
||||
cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
|
||||
|
||||
/* Recognize asm goto and do the right thing. */
|
||||
else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
|
||||
|
@ -318,7 +318,7 @@ make_edges (basic_block min, basic_block max, int update_p)
|
|||
worry about EH edges, since we wouldn't have created the sibling call
|
||||
in the first place. */
|
||||
if (code == CALL_INSN && SIBLING_CALL_P (insn))
|
||||
cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
|
||||
cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
|
||||
EDGE_SIBCALL | EDGE_ABNORMAL);
|
||||
|
||||
/* If this is a CALL_INSN, then mark it as reaching the active EH
|
||||
|
@ -359,7 +359,7 @@ make_edges (basic_block min, basic_block max, int update_p)
|
|||
|
||||
/* Find out if we can drop through to the next block. */
|
||||
insn = NEXT_INSN (insn);
|
||||
e = find_edge (bb, EXIT_BLOCK_PTR);
|
||||
e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
if (e && e->flags & EDGE_FALLTHRU)
|
||||
insn = NULL;
|
||||
|
||||
|
@ -369,8 +369,9 @@ make_edges (basic_block min, basic_block max, int update_p)
|
|||
insn = NEXT_INSN (insn);
|
||||
|
||||
if (!insn)
|
||||
cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
|
||||
else if (bb->next_bb != EXIT_BLOCK_PTR)
|
||||
cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
|
||||
EDGE_FALLTHRU);
|
||||
else if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
if (insn == BB_HEAD (bb->next_bb))
|
||||
cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
|
||||
|
@ -480,7 +481,7 @@ find_bb_boundaries (basic_block bb)
|
|||
remove_edge (fallthru);
|
||||
flow_transfer_insn = NULL_RTX;
|
||||
if (code == CODE_LABEL && LABEL_ALT_ENTRY_P (insn))
|
||||
make_edge (ENTRY_BLOCK_PTR, bb, 0);
|
||||
make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, 0);
|
||||
}
|
||||
else if (code == BARRIER)
|
||||
{
|
||||
|
@ -607,7 +608,7 @@ find_many_sub_basic_blocks (sbitmap blocks)
|
|||
break;
|
||||
|
||||
min = max = bb;
|
||||
for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
|
||||
for (; bb != EXIT_BLOCK_PTR_FOR_FN (cfun); bb = bb->next_bb)
|
||||
if (STATE (bb) != BLOCK_ORIGINAL)
|
||||
max = bb;
|
||||
|
||||
|
|
|
@ -134,7 +134,7 @@ try_simplify_condjump (basic_block cbranch_block)
|
|||
unconditional jump. */
|
||||
jump_block = cbranch_fallthru_edge->dest;
|
||||
if (!single_pred_p (jump_block)
|
||||
|| jump_block->next_bb == EXIT_BLOCK_PTR
|
||||
|| jump_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| !FORWARDER_BLOCK_P (jump_block))
|
||||
return false;
|
||||
jump_dest_block = single_succ (jump_block);
|
||||
|
@ -157,7 +157,7 @@ try_simplify_condjump (basic_block cbranch_block)
|
|||
unconditional branch. */
|
||||
cbranch_dest_block = cbranch_jump_edge->dest;
|
||||
|
||||
if (cbranch_dest_block == EXIT_BLOCK_PTR
|
||||
if (cbranch_dest_block == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| !can_fallthru (jump_block, cbranch_dest_block))
|
||||
return false;
|
||||
|
||||
|
@ -455,7 +455,7 @@ try_forward_edges (int mode, basic_block b)
|
|||
bb-reorder.c:partition_hot_cold_basic_blocks for complete
|
||||
details. */
|
||||
|
||||
if (first != EXIT_BLOCK_PTR
|
||||
if (first != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& find_reg_note (BB_END (first), REG_CROSSING_JUMP, NULL_RTX))
|
||||
return changed;
|
||||
|
||||
|
@ -467,7 +467,7 @@ try_forward_edges (int mode, basic_block b)
|
|||
|
||||
if (FORWARDER_BLOCK_P (target)
|
||||
&& !(single_succ_edge (target)->flags & EDGE_CROSSING)
|
||||
&& single_succ (target) != EXIT_BLOCK_PTR)
|
||||
&& single_succ (target) != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
/* Bypass trivial infinite loops. */
|
||||
new_target = single_succ (target);
|
||||
|
@ -580,7 +580,7 @@ try_forward_edges (int mode, basic_block b)
|
|||
e->goto_locus = goto_locus;
|
||||
|
||||
/* Don't force if target is exit block. */
|
||||
if (threaded && target != EXIT_BLOCK_PTR)
|
||||
if (threaded && target != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
notice_new_block (redirect_edge_and_branch_force (e, target));
|
||||
if (dump_file)
|
||||
|
@ -793,7 +793,7 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
|
|||
fprintf (dump_file, "Merged %d and %d without moving.\n",
|
||||
b_index, c_index);
|
||||
|
||||
return b->prev_bb == ENTRY_BLOCK_PTR ? b : b->prev_bb;
|
||||
return b->prev_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? b : b->prev_bb;
|
||||
}
|
||||
|
||||
/* Otherwise we will need to move code around. Do that only if expensive
|
||||
|
@ -831,7 +831,7 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
|
|||
if (! c_has_outgoing_fallthru)
|
||||
{
|
||||
merge_blocks_move_successor_nojumps (b, c);
|
||||
return next == ENTRY_BLOCK_PTR ? next->next_bb : next;
|
||||
return next == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? next->next_bb : next;
|
||||
}
|
||||
|
||||
/* If B does not have an incoming fallthru, then it can be moved
|
||||
|
@ -843,7 +843,7 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
|
|||
{
|
||||
basic_block bb;
|
||||
|
||||
if (b_fallthru_edge->src == ENTRY_BLOCK_PTR)
|
||||
if (b_fallthru_edge->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
return NULL;
|
||||
bb = force_nonfallthru (b_fallthru_edge);
|
||||
if (bb)
|
||||
|
@ -851,7 +851,7 @@ merge_blocks_move (edge e, basic_block b, basic_block c, int mode)
|
|||
}
|
||||
|
||||
merge_blocks_move_predecessor_nojumps (b, c);
|
||||
return next == ENTRY_BLOCK_PTR ? next->next_bb : next;
|
||||
return next == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? next->next_bb : next;
|
||||
}
|
||||
|
||||
return NULL;
|
||||
|
@ -1267,7 +1267,7 @@ walk_to_nondebug_insn (rtx *i1, basic_block *bb1, bool follow_fallthru,
|
|||
return;
|
||||
|
||||
fallthru = find_fallthru_edge ((*bb1)->preds);
|
||||
if (!fallthru || fallthru->src == ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun)
|
||||
if (!fallthru || fallthru->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| !single_succ_p (fallthru->src))
|
||||
return;
|
||||
|
||||
|
@ -1540,7 +1540,8 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
|
|||
whether they went through the prologue. Sibcalls are fine, we know
|
||||
that we either didn't need or inserted an epilogue before them. */
|
||||
if (crtl->shrink_wrapped
|
||||
&& single_succ_p (bb1) && single_succ (bb1) == EXIT_BLOCK_PTR
|
||||
&& single_succ_p (bb1)
|
||||
&& single_succ (bb1) == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& !JUMP_P (BB_END (bb1))
|
||||
&& !(CALL_P (BB_END (bb1)) && SIBLING_CALL_P (BB_END (bb1))))
|
||||
return false;
|
||||
|
@ -1902,7 +1903,8 @@ try_crossjump_to_edge (int mode, edge e1, edge e2,
|
|||
e2 = single_pred_edge (src2), src2 = e2->src;
|
||||
|
||||
/* Nothing to do if we reach ENTRY, or a common source block. */
|
||||
if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)
|
||||
if (src1 == ENTRY_BLOCK_PTR_FOR_FN (cfun) || src2
|
||||
== ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
return false;
|
||||
if (src1 == src2)
|
||||
return false;
|
||||
|
@ -2146,7 +2148,7 @@ try_crossjump_bb (int mode, basic_block bb)
|
|||
/* Don't crossjump if this block ends in a computed jump,
|
||||
unless we are optimizing for size. */
|
||||
if (optimize_bb_for_size_p (bb)
|
||||
&& bb != EXIT_BLOCK_PTR
|
||||
&& bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& computed_jump_p (BB_END (bb)))
|
||||
return false;
|
||||
|
||||
|
@ -2287,7 +2289,7 @@ try_head_merge_bb (basic_block bb)
|
|||
/* Don't crossjump if this block ends in a computed jump,
|
||||
unless we are optimizing for size. */
|
||||
if (optimize_bb_for_size_p (bb)
|
||||
&& bb != EXIT_BLOCK_PTR
|
||||
&& bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& computed_jump_p (BB_END (bb)))
|
||||
return false;
|
||||
|
||||
|
@ -2303,7 +2305,7 @@ try_head_merge_bb (basic_block bb)
|
|||
}
|
||||
|
||||
for (ix = 0; ix < nedges; ix++)
|
||||
if (EDGE_SUCC (bb, ix)->dest == EXIT_BLOCK_PTR)
|
||||
if (EDGE_SUCC (bb, ix)->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return false;
|
||||
|
||||
for (ix = 0; ix < nedges; ix++)
|
||||
|
@ -2623,7 +2625,8 @@ try_optimize_cfg (int mode)
|
|||
"\n\ntry_optimize_cfg iteration %i\n\n",
|
||||
iterations);
|
||||
|
||||
for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR;)
|
||||
for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
|
||||
!= EXIT_BLOCK_PTR_FOR_FN (cfun);)
|
||||
{
|
||||
basic_block c;
|
||||
edge s;
|
||||
|
@ -2640,7 +2643,8 @@ try_optimize_cfg (int mode)
|
|||
if (EDGE_COUNT (b->preds) == 0
|
||||
|| (EDGE_COUNT (b->succs) == 0
|
||||
&& trivially_empty_bb_p (b)
|
||||
&& single_succ_edge (ENTRY_BLOCK_PTR)->dest != b))
|
||||
&& single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->dest
|
||||
!= b))
|
||||
{
|
||||
c = b->prev_bb;
|
||||
if (EDGE_COUNT (b->preds) > 0)
|
||||
|
@ -2681,7 +2685,7 @@ try_optimize_cfg (int mode)
|
|||
delete_basic_block (b);
|
||||
changed = true;
|
||||
/* Avoid trying to remove ENTRY_BLOCK_PTR. */
|
||||
b = (c == ENTRY_BLOCK_PTR ? c->next_bb : c);
|
||||
b = (c == ENTRY_BLOCK_PTR_FOR_FN (cfun) ? c->next_bb : c);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -2696,7 +2700,7 @@ try_optimize_cfg (int mode)
|
|||
if CASE_DROPS_THRU, this can be a tablejump with
|
||||
some element going to the same place as the
|
||||
default (fallthru). */
|
||||
&& (single_pred (b) == ENTRY_BLOCK_PTR
|
||||
&& (single_pred (b) == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| !JUMP_P (BB_END (single_pred (b)))
|
||||
|| ! label_is_jump_target_p (BB_HEAD (b),
|
||||
BB_END (single_pred (b)))))
|
||||
|
@ -2723,7 +2727,8 @@ try_optimize_cfg (int mode)
|
|||
"Deleting fallthru block %i.\n",
|
||||
b->index);
|
||||
|
||||
c = b->prev_bb == ENTRY_BLOCK_PTR ? b->next_bb : b->prev_bb;
|
||||
c = ((b->prev_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
? b->next_bb : b->prev_bb);
|
||||
redirect_edge_succ_nodup (single_pred_edge (b),
|
||||
single_succ (b));
|
||||
delete_basic_block (b);
|
||||
|
@ -2736,7 +2741,7 @@ try_optimize_cfg (int mode)
|
|||
if (single_succ_p (b)
|
||||
&& (s = single_succ_edge (b))
|
||||
&& !(s->flags & EDGE_COMPLEX)
|
||||
&& (c = s->dest) != EXIT_BLOCK_PTR
|
||||
&& (c = s->dest) != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& single_pred_p (c)
|
||||
&& b != c)
|
||||
{
|
||||
|
@ -2780,7 +2785,7 @@ try_optimize_cfg (int mode)
|
|||
can either delete the jump entirely, or replace it
|
||||
with a simple unconditional jump. */
|
||||
if (single_succ_p (b)
|
||||
&& single_succ (b) != EXIT_BLOCK_PTR
|
||||
&& single_succ (b) != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& onlyjump_p (BB_END (b))
|
||||
&& !find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX)
|
||||
&& try_redirect_by_replacing_jump (single_succ_edge (b),
|
||||
|
@ -2819,7 +2824,7 @@ try_optimize_cfg (int mode)
|
|||
}
|
||||
|
||||
if ((mode & CLEANUP_CROSSJUMP)
|
||||
&& try_crossjump_bb (mode, EXIT_BLOCK_PTR))
|
||||
&& try_crossjump_bb (mode, EXIT_BLOCK_PTR_FOR_FN (cfun)))
|
||||
changed = true;
|
||||
|
||||
if (block_was_dirty)
|
||||
|
@ -2876,7 +2881,8 @@ delete_unreachable_blocks (void)
|
|||
if (MAY_HAVE_DEBUG_INSNS && current_ir_type () == IR_GIMPLE
|
||||
&& dom_info_available_p (CDI_DOMINATORS))
|
||||
{
|
||||
for (b = EXIT_BLOCK_PTR->prev_bb; b != ENTRY_BLOCK_PTR; b = prev_bb)
|
||||
for (b = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
|
||||
b != ENTRY_BLOCK_PTR_FOR_FN (cfun); b = prev_bb)
|
||||
{
|
||||
prev_bb = b->prev_bb;
|
||||
|
||||
|
@ -2912,7 +2918,8 @@ delete_unreachable_blocks (void)
|
|||
}
|
||||
else
|
||||
{
|
||||
for (b = EXIT_BLOCK_PTR->prev_bb; b != ENTRY_BLOCK_PTR; b = prev_bb)
|
||||
for (b = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
|
||||
b != ENTRY_BLOCK_PTR_FOR_FN (cfun); b = prev_bb)
|
||||
{
|
||||
prev_bb = b->prev_bb;
|
||||
|
||||
|
|
|
@ -3363,7 +3363,7 @@ expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
|
|||
{
|
||||
if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
|
||||
{
|
||||
if (e->dest != EXIT_BLOCK_PTR)
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
e->dest->count -= e->count;
|
||||
e->dest->frequency -= EDGE_FREQUENCY (e);
|
||||
|
@ -3399,7 +3399,8 @@ expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
|
|||
delete_insn (NEXT_INSN (last));
|
||||
}
|
||||
|
||||
e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
|
||||
e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
|
||||
| EDGE_SIBCALL);
|
||||
e->probability += probability;
|
||||
e->count += count;
|
||||
BB_END (bb) = last;
|
||||
|
@ -4840,9 +4841,9 @@ expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
|
|||
gimple ret_stmt = gsi_stmt (gsi);
|
||||
|
||||
gcc_assert (single_succ_p (bb));
|
||||
gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
|
||||
gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
if (bb->next_bb == EXIT_BLOCK_PTR
|
||||
if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& !gimple_return_retval (ret_stmt))
|
||||
{
|
||||
gsi_remove (&gsi, false);
|
||||
|
@ -5184,17 +5185,17 @@ construct_init_block (void)
|
|||
int flags;
|
||||
|
||||
/* Multiple entry points not supported yet. */
|
||||
gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
|
||||
init_rtl_bb_info (ENTRY_BLOCK_PTR);
|
||||
init_rtl_bb_info (EXIT_BLOCK_PTR);
|
||||
ENTRY_BLOCK_PTR->flags |= BB_RTL;
|
||||
EXIT_BLOCK_PTR->flags |= BB_RTL;
|
||||
gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
|
||||
init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
|
||||
|
||||
e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
|
||||
e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
|
||||
|
||||
/* When entry edge points to first basic block, we don't need jump,
|
||||
otherwise we have to jump into proper target. */
|
||||
if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
|
||||
if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
|
||||
{
|
||||
tree label = gimple_block_label (e->dest);
|
||||
|
||||
|
@ -5206,11 +5207,11 @@ construct_init_block (void)
|
|||
|
||||
init_block = create_basic_block (NEXT_INSN (get_insns ()),
|
||||
get_last_insn (),
|
||||
ENTRY_BLOCK_PTR);
|
||||
init_block->frequency = ENTRY_BLOCK_PTR->frequency;
|
||||
init_block->count = ENTRY_BLOCK_PTR->count;
|
||||
if (current_loops && ENTRY_BLOCK_PTR->loop_father)
|
||||
add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
|
||||
init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
|
||||
if (current_loops && ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father)
|
||||
add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
|
||||
if (e)
|
||||
{
|
||||
first_block = e->dest;
|
||||
|
@ -5218,9 +5219,9 @@ construct_init_block (void)
|
|||
e = make_edge (init_block, first_block, flags);
|
||||
}
|
||||
else
|
||||
e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
|
||||
e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
|
||||
e->probability = REG_BR_PROB_BASE;
|
||||
e->count = ENTRY_BLOCK_PTR->count;
|
||||
e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
|
||||
|
||||
update_bb_for_insn (init_block);
|
||||
return init_block;
|
||||
|
@ -5251,9 +5252,9 @@ construct_exit_block (void)
|
|||
edge e, e2;
|
||||
unsigned ix;
|
||||
edge_iterator ei;
|
||||
rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
|
||||
rtx orig_end = BB_END (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
|
||||
|
||||
rtl_profile_for_bb (EXIT_BLOCK_PTR);
|
||||
rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
/* Make sure the locus is set to the end of the function, so that
|
||||
epilogue line numbers and warnings are set properly. */
|
||||
|
@ -5268,30 +5269,30 @@ construct_exit_block (void)
|
|||
return;
|
||||
/* While emitting the function end we could move end of the last basic block.
|
||||
*/
|
||||
BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
|
||||
BB_END (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb) = orig_end;
|
||||
while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
|
||||
head = NEXT_INSN (head);
|
||||
exit_block = create_basic_block (NEXT_INSN (head), end,
|
||||
EXIT_BLOCK_PTR->prev_bb);
|
||||
exit_block->frequency = EXIT_BLOCK_PTR->frequency;
|
||||
exit_block->count = EXIT_BLOCK_PTR->count;
|
||||
if (current_loops && EXIT_BLOCK_PTR->loop_father)
|
||||
add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
|
||||
exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
|
||||
exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
|
||||
if (current_loops && EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father)
|
||||
add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
|
||||
|
||||
ix = 0;
|
||||
while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
|
||||
while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
|
||||
{
|
||||
e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
|
||||
e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
|
||||
if (!(e->flags & EDGE_ABNORMAL))
|
||||
redirect_edge_succ (e, exit_block);
|
||||
else
|
||||
ix++;
|
||||
}
|
||||
|
||||
e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
|
||||
e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
|
||||
e->probability = REG_BR_PROB_BASE;
|
||||
e->count = EXIT_BLOCK_PTR->count;
|
||||
FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
|
||||
e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
|
||||
FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
if (e2 != e)
|
||||
{
|
||||
e->count -= e2->count;
|
||||
|
@ -5521,7 +5522,7 @@ gimple_expand_cfg (void)
|
|||
/* Dominators are not kept up-to-date as we may create new basic-blocks. */
|
||||
free_dominance_info (CDI_DOMINATORS);
|
||||
|
||||
rtl_profile_for_bb (ENTRY_BLOCK_PTR);
|
||||
rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
insn_locations_init ();
|
||||
if (!DECL_IS_BUILTIN (current_function_decl))
|
||||
|
@ -5685,11 +5686,12 @@ gimple_expand_cfg (void)
|
|||
|
||||
/* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
|
||||
remaining edges later. */
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
e->flags &= ~EDGE_EXECUTABLE;
|
||||
|
||||
lab_rtx_for_bb = pointer_map_create ();
|
||||
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
|
||||
next_bb)
|
||||
bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
|
||||
|
||||
if (MAY_HAVE_DEBUG_INSNS)
|
||||
|
@ -5734,7 +5736,8 @@ gimple_expand_cfg (void)
|
|||
split edges which edge insertions might do. */
|
||||
rebuild_jump_labels (get_insns ());
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -5745,8 +5748,8 @@ gimple_expand_cfg (void)
|
|||
rebuild_jump_labels_chain (e->insns.r);
|
||||
/* Put insns after parm birth, but before
|
||||
NOTE_INSNS_FUNCTION_BEG. */
|
||||
if (e->src == ENTRY_BLOCK_PTR
|
||||
&& single_succ_p (ENTRY_BLOCK_PTR))
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
|
||||
{
|
||||
rtx insns = e->insns.r;
|
||||
e->insns.r = NULL_RTX;
|
||||
|
@ -5767,7 +5770,8 @@ gimple_expand_cfg (void)
|
|||
/* We're done expanding trees to RTL. */
|
||||
currently_expanding_to_rtl = 0;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
|
|
@ -102,10 +102,10 @@ verify_flow_info (void)
|
|||
edge_checksum = XCNEWVEC (size_t, last_basic_block);
|
||||
|
||||
/* Check bb chain & numbers. */
|
||||
last_bb_seen = ENTRY_BLOCK_PTR;
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
|
||||
last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
|
||||
{
|
||||
if (bb != EXIT_BLOCK_PTR
|
||||
if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& bb != BASIC_BLOCK (bb->index))
|
||||
{
|
||||
error ("bb %d on wrong place", bb->index);
|
||||
|
@ -234,21 +234,21 @@ verify_flow_info (void)
|
|||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
edge_checksum[e->dest->index] += (size_t) e;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
edge_checksum[e->dest->index] -= (size_t) e;
|
||||
}
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
if (edge_checksum[bb->index])
|
||||
{
|
||||
error ("basic block %i edge lists are corrupted", bb->index);
|
||||
err = 1;
|
||||
}
|
||||
|
||||
last_bb_seen = ENTRY_BLOCK_PTR;
|
||||
last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
/* Clean up. */
|
||||
free (last_visited);
|
||||
|
@ -938,10 +938,11 @@ tidy_fallthru_edges (void)
|
|||
if (!cfg_hooks->tidy_fallthru_edge)
|
||||
return;
|
||||
|
||||
if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
|
||||
if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return;
|
||||
|
||||
FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
|
||||
FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb, next_bb)
|
||||
{
|
||||
edge s;
|
||||
|
||||
|
@ -1011,7 +1012,7 @@ can_duplicate_block_p (const_basic_block bb)
|
|||
internal_error ("%s does not support can_duplicate_block_p",
|
||||
cfg_hooks->name);
|
||||
|
||||
if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
|
||||
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
return false;
|
||||
|
||||
return cfg_hooks->can_duplicate_block_p (bb);
|
||||
|
@ -1409,7 +1410,7 @@ account_profile_record (struct profile_record *record, int after_pass)
|
|||
|
||||
FOR_ALL_BB (bb)
|
||||
{
|
||||
if (bb != EXIT_BLOCK_PTR_FOR_FUNCTION (cfun)
|
||||
if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& profile_status != PROFILE_ABSENT)
|
||||
{
|
||||
sum = 0;
|
||||
|
@ -1424,7 +1425,7 @@ account_profile_record (struct profile_record *record, int after_pass)
|
|||
&& (lsum - bb->count > 100 || lsum - bb->count < -100))
|
||||
record->num_mismatched_count_out[after_pass]++;
|
||||
}
|
||||
if (bb != ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun)
|
||||
if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& profile_status != PROFILE_ABSENT)
|
||||
{
|
||||
sum = 0;
|
||||
|
@ -1440,8 +1441,8 @@ account_profile_record (struct profile_record *record, int after_pass)
|
|||
if (lsum - bb->count > 100 || lsum - bb->count < -100)
|
||||
record->num_mismatched_count_in[after_pass]++;
|
||||
}
|
||||
if (bb == ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun)
|
||||
|| bb == EXIT_BLOCK_PTR_FOR_FUNCTION (cfun))
|
||||
if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
gcc_assert (cfg_hooks->account_profile_record);
|
||||
cfg_hooks->account_profile_record (bb, after_pass, record);
|
||||
|
|
|
@ -352,10 +352,10 @@ init_loops_structure (struct function *fn,
|
|||
/* Dummy loop containing whole function. */
|
||||
root = alloc_loop ();
|
||||
root->num_nodes = n_basic_blocks_for_fn (fn);
|
||||
root->latch = EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
|
||||
root->header = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
|
||||
ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->loop_father = root;
|
||||
EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->loop_father = root;
|
||||
root->latch = EXIT_BLOCK_PTR_FOR_FN (fn);
|
||||
root->header = ENTRY_BLOCK_PTR_FOR_FN (fn);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
|
||||
EXIT_BLOCK_PTR_FOR_FN (fn)->loop_father = root;
|
||||
|
||||
loops->larray->quick_push (root);
|
||||
loops->tree_root = root;
|
||||
|
@ -382,7 +382,7 @@ bb_loop_header_p (basic_block header)
|
|||
FOR_EACH_EDGE (e, ei, header->preds)
|
||||
{
|
||||
basic_block latch = e->src;
|
||||
if (latch != ENTRY_BLOCK_PTR
|
||||
if (latch != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& dominated_by_p (CDI_DOMINATORS, latch, header))
|
||||
return true;
|
||||
}
|
||||
|
@ -745,7 +745,7 @@ disambiguate_multiple_latches (struct loop *loop)
|
|||
block. This would cause problems if the entry edge was the one from the
|
||||
entry block. To avoid having to handle this case specially, split
|
||||
such entry edge. */
|
||||
e = find_edge (ENTRY_BLOCK_PTR, loop->header);
|
||||
e = find_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), loop->header);
|
||||
if (e)
|
||||
split_edge (e);
|
||||
|
||||
|
@ -781,7 +781,8 @@ flow_bb_inside_loop_p (const struct loop *loop, const_basic_block bb)
|
|||
{
|
||||
struct loop *source_loop;
|
||||
|
||||
if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
|
||||
if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return 0;
|
||||
|
||||
source_loop = bb->loop_father;
|
||||
|
@ -826,13 +827,13 @@ get_loop_body (const struct loop *loop)
|
|||
|
||||
body = XNEWVEC (basic_block, loop->num_nodes);
|
||||
|
||||
if (loop->latch == EXIT_BLOCK_PTR)
|
||||
if (loop->latch == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
/* There may be blocks unreachable from EXIT_BLOCK, hence we need to
|
||||
special-case the fake loop that contains the whole function. */
|
||||
gcc_assert (loop->num_nodes == (unsigned) n_basic_blocks_for_fn (cfun));
|
||||
body[tv++] = loop->header;
|
||||
body[tv++] = EXIT_BLOCK_PTR;
|
||||
body[tv++] = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
FOR_EACH_BB (bb)
|
||||
body[tv++] = bb;
|
||||
}
|
||||
|
@ -886,7 +887,7 @@ get_loop_body_in_dom_order (const struct loop *loop)
|
|||
|
||||
tovisit = XNEWVEC (basic_block, loop->num_nodes);
|
||||
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR);
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
tv = 0;
|
||||
fill_sons_in_loop (loop, loop->header, tovisit, &tv);
|
||||
|
@ -921,7 +922,7 @@ get_loop_body_in_bfs_order (const struct loop *loop)
|
|||
unsigned int vc = 1;
|
||||
|
||||
gcc_assert (loop->num_nodes);
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR);
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
blocks = XNEWVEC (basic_block, loop->num_nodes);
|
||||
visited = BITMAP_ALLOC (NULL);
|
||||
|
@ -1143,7 +1144,7 @@ get_loop_exit_edges (const struct loop *loop)
|
|||
edge_iterator ei;
|
||||
struct loop_exit *exit;
|
||||
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR);
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
/* If we maintain the lists of exits, use them. Otherwise we must
|
||||
scan the body of the loop. */
|
||||
|
@ -1175,7 +1176,7 @@ num_loop_branches (const struct loop *loop)
|
|||
unsigned i, n;
|
||||
basic_block * body;
|
||||
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR);
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
body = get_loop_body (loop);
|
||||
n = 0;
|
||||
|
|
|
@ -85,7 +85,8 @@ mark_irreducible_loops (void)
|
|||
gcc_assert (current_loops != NULL);
|
||||
|
||||
/* Reset the flags. */
|
||||
FOR_BB_BETWEEN (act, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (act, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
act->flags &= ~BB_IRREDUCIBLE_LOOP;
|
||||
FOR_EACH_EDGE (e, ei, act->succs)
|
||||
|
@ -95,11 +96,12 @@ mark_irreducible_loops (void)
|
|||
/* Create the edge lists. */
|
||||
g = new_graph (last_basic_block + num);
|
||||
|
||||
FOR_BB_BETWEEN (act, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (act, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
FOR_EACH_EDGE (e, ei, act->succs)
|
||||
{
|
||||
/* Ignore edges to exit. */
|
||||
if (e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
src = BB_REPR (act);
|
||||
|
|
|
@ -92,7 +92,7 @@ fix_bb_placement (basic_block bb)
|
|||
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
{
|
||||
if (e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
act = e->dest->loop_father;
|
||||
|
@ -352,7 +352,8 @@ remove_path (edge e)
|
|||
bitmap_set_bit (seen, rem_bbs[i]->index);
|
||||
if (!irred_invalidated)
|
||||
FOR_EACH_EDGE (ae, ei, e->src->succs)
|
||||
if (ae != e && ae->dest != EXIT_BLOCK_PTR && !bitmap_bit_p (seen, ae->dest->index)
|
||||
if (ae != e && ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& !bitmap_bit_p (seen, ae->dest->index)
|
||||
&& ae->flags & EDGE_IRREDUCIBLE_LOOP)
|
||||
{
|
||||
irred_invalidated = true;
|
||||
|
@ -363,7 +364,8 @@ remove_path (edge e)
|
|||
{
|
||||
bb = rem_bbs[i];
|
||||
FOR_EACH_EDGE (ae, ei, rem_bbs[i]->succs)
|
||||
if (ae->dest != EXIT_BLOCK_PTR && !bitmap_bit_p (seen, ae->dest->index))
|
||||
if (ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& !bitmap_bit_p (seen, ae->dest->index))
|
||||
{
|
||||
bitmap_set_bit (seen, ae->dest->index);
|
||||
bord_bbs[n_bord_bbs++] = ae->dest;
|
||||
|
@ -1519,7 +1521,7 @@ create_preheader (struct loop *loop, int flags)
|
|||
|
||||
/* We do not allow entry block to be the loop preheader, since we
|
||||
cannot emit code there. */
|
||||
if (single_entry->src == ENTRY_BLOCK_PTR)
|
||||
if (single_entry->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
need_forwarder_block = true;
|
||||
else
|
||||
{
|
||||
|
|
172
gcc/cfgrtl.c
172
gcc/cfgrtl.c
|
@ -501,7 +501,7 @@ rtx
|
|||
entry_of_function (void)
|
||||
{
|
||||
return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
|
||||
BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
|
||||
BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
|
||||
}
|
||||
|
||||
/* Emit INSN at the entry point of the function, ensuring that it is only
|
||||
|
@ -509,7 +509,7 @@ entry_of_function (void)
|
|||
void
|
||||
emit_insn_at_entry (rtx insn)
|
||||
{
|
||||
edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
|
||||
edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
|
||||
edge e = ei_safe_edge (ei);
|
||||
gcc_assert (e->flags & EDGE_FALLTHRU);
|
||||
|
||||
|
@ -573,7 +573,7 @@ contains_no_active_insn_p (const_basic_block bb)
|
|||
{
|
||||
rtx insn;
|
||||
|
||||
if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
|
||||
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| !single_succ_p (bb))
|
||||
return false;
|
||||
|
||||
|
@ -620,7 +620,7 @@ can_fallthru (basic_block src, basic_block target)
|
|||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
if (target == EXIT_BLOCK_PTR)
|
||||
if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return true;
|
||||
if (src->next_bb != target)
|
||||
return false;
|
||||
|
@ -630,7 +630,7 @@ can_fallthru (basic_block src, basic_block target)
|
|||
return false;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, src->succs)
|
||||
if (e->dest == EXIT_BLOCK_PTR
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& e->flags & EDGE_FALLTHRU)
|
||||
return false;
|
||||
|
||||
|
@ -650,10 +650,10 @@ could_fall_through (basic_block src, basic_block target)
|
|||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
if (target == EXIT_BLOCK_PTR)
|
||||
if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return true;
|
||||
FOR_EACH_EDGE (e, ei, src->succs)
|
||||
if (e->dest == EXIT_BLOCK_PTR
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& e->flags & EDGE_FALLTHRU)
|
||||
return 0;
|
||||
return true;
|
||||
|
@ -958,7 +958,8 @@ rtl_can_merge_blocks (basic_block a, basic_block b)
|
|||
/* Must be simple edge. */
|
||||
&& !(single_succ_edge (a)->flags & EDGE_COMPLEX)
|
||||
&& a->next_bb == b
|
||||
&& a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
|
||||
&& a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& b != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
/* If the jump insn has side effects,
|
||||
we can't kill the edge. */
|
||||
&& (!JUMP_P (BB_END (a))
|
||||
|
@ -972,7 +973,7 @@ rtl_can_merge_blocks (basic_block a, basic_block b)
|
|||
rtx
|
||||
block_label (basic_block block)
|
||||
{
|
||||
if (block == EXIT_BLOCK_PTR)
|
||||
if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return NULL_RTX;
|
||||
|
||||
if (!LABEL_P (BB_HEAD (block)))
|
||||
|
@ -1084,13 +1085,13 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
|
|||
INSN_UID (insn), e->dest->index, target->index);
|
||||
if (!redirect_jump (insn, block_label (target), 0))
|
||||
{
|
||||
gcc_assert (target == EXIT_BLOCK_PTR);
|
||||
gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
/* Cannot do anything for target exit block. */
|
||||
else if (target == EXIT_BLOCK_PTR)
|
||||
else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return NULL;
|
||||
|
||||
/* Or replace possibly complicated jump insn by simple jump insn. */
|
||||
|
@ -1178,7 +1179,7 @@ patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
|
|||
int j;
|
||||
rtx new_label = block_label (new_bb);
|
||||
|
||||
if (new_bb == EXIT_BLOCK_PTR)
|
||||
if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return false;
|
||||
if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
|
||||
vec = XVEC (PATTERN (tmp), 0);
|
||||
|
@ -1211,7 +1212,7 @@ patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
|
|||
int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
|
||||
rtx new_label, note;
|
||||
|
||||
if (new_bb == EXIT_BLOCK_PTR)
|
||||
if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return false;
|
||||
new_label = block_label (new_bb);
|
||||
|
||||
|
@ -1268,7 +1269,7 @@ patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
|
|||
target is exit block on some arches. */
|
||||
if (!redirect_jump (insn, block_label (new_bb), 0))
|
||||
{
|
||||
gcc_assert (new_bb == EXIT_BLOCK_PTR);
|
||||
gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -1324,7 +1325,8 @@ fixup_partition_crossing (edge e)
|
|||
{
|
||||
rtx note;
|
||||
|
||||
if (e->src == ENTRY_BLOCK_PTR || e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
|
||||
== EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return;
|
||||
/* If we redirected an existing edge, it may already be marked
|
||||
crossing, even though the new src is missing a reg crossing note.
|
||||
|
@ -1392,7 +1394,7 @@ fixup_new_cold_bb (basic_block bb)
|
|||
boundary fixup by calling fixup_partition_crossing itself. */
|
||||
if ((e->flags & EDGE_FALLTHRU)
|
||||
&& BB_PARTITION (bb) != BB_PARTITION (e->dest)
|
||||
&& e->dest != EXIT_BLOCK_PTR)
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
force_nonfallthru (e);
|
||||
else
|
||||
fixup_partition_crossing (e);
|
||||
|
@ -1470,7 +1472,8 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
|
|||
/* In the case the last instruction is conditional jump to the next
|
||||
instruction, first redirect the jump itself and then continue
|
||||
by creating a basic block afterwards to redirect fallthru edge. */
|
||||
if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
|
||||
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& any_condjump_p (BB_END (e->src))
|
||||
&& JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
|
||||
{
|
||||
|
@ -1512,7 +1515,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
|
|||
else
|
||||
{
|
||||
gcc_assert (e->flags & EDGE_FALLTHRU);
|
||||
if (e->src == ENTRY_BLOCK_PTR)
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
/* We can't redirect the entry block. Create an empty block
|
||||
at the start of the function which we use to add the new
|
||||
|
@ -1521,16 +1524,18 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
|
|||
edge_iterator ei;
|
||||
bool found = false;
|
||||
|
||||
basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
|
||||
basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
/* Change the existing edge's source to be the new block, and add
|
||||
a new edge from the entry block to the new block. */
|
||||
e->src = bb;
|
||||
for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
|
||||
for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
|
||||
(tmp = ei_safe_edge (ei)); )
|
||||
{
|
||||
if (tmp == e)
|
||||
{
|
||||
ENTRY_BLOCK_PTR->succs->unordered_remove (ei.index);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
|
@ -1541,14 +1546,15 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
|
|||
gcc_assert (found);
|
||||
|
||||
vec_safe_push (bb->succs, e);
|
||||
make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
|
||||
make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
|
||||
EDGE_FALLTHRU);
|
||||
}
|
||||
}
|
||||
|
||||
/* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
|
||||
don't point to the target or fallthru label. */
|
||||
if (JUMP_P (BB_END (e->src))
|
||||
&& target != EXIT_BLOCK_PTR
|
||||
&& target != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& (e->flags & EDGE_FALLTHRU)
|
||||
&& (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
|
||||
{
|
||||
|
@ -1650,7 +1656,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
|
|||
|
||||
loc = e->goto_locus;
|
||||
e->flags &= ~EDGE_FALLTHRU;
|
||||
if (target == EXIT_BLOCK_PTR)
|
||||
if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
if (jump_label == ret_rtx)
|
||||
{
|
||||
|
@ -1784,7 +1790,7 @@ static basic_block
|
|||
last_bb_in_partition (basic_block start_bb)
|
||||
{
|
||||
basic_block bb;
|
||||
FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
|
||||
return bb;
|
||||
|
@ -1820,14 +1826,15 @@ rtl_split_edge (edge edge_in)
|
|||
}
|
||||
|
||||
/* Create the basic block note. */
|
||||
if (edge_in->dest != EXIT_BLOCK_PTR)
|
||||
if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
before = BB_HEAD (edge_in->dest);
|
||||
else
|
||||
before = NULL_RTX;
|
||||
|
||||
/* If this is a fall through edge to the exit block, the blocks might be
|
||||
not adjacent, and the right place is after the source. */
|
||||
if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
|
||||
if ((edge_in->flags & EDGE_FALLTHRU)
|
||||
&& edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
before = NEXT_INSN (BB_END (edge_in->src));
|
||||
bb = create_basic_block (before, NULL, edge_in->src);
|
||||
|
@ -1835,7 +1842,7 @@ rtl_split_edge (edge edge_in)
|
|||
}
|
||||
else
|
||||
{
|
||||
if (edge_in->src == ENTRY_BLOCK_PTR)
|
||||
if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
|
||||
BB_COPY_PARTITION (bb, edge_in->dest);
|
||||
|
@ -1873,7 +1880,7 @@ rtl_split_edge (edge edge_in)
|
|||
|
||||
/* Can't allow a region crossing edge to be fallthrough. */
|
||||
if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
|
||||
&& edge_in->dest != EXIT_BLOCK_PTR)
|
||||
&& edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
new_bb = force_nonfallthru (single_succ_edge (bb));
|
||||
gcc_assert (!new_bb);
|
||||
|
@ -1888,7 +1895,7 @@ rtl_split_edge (edge edge_in)
|
|||
}
|
||||
else
|
||||
{
|
||||
if (edge_in->src != ENTRY_BLOCK_PTR)
|
||||
if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
/* For asm goto even splitting of fallthru edge might
|
||||
need insn patching, as other labels might point to the
|
||||
|
@ -1896,7 +1903,7 @@ rtl_split_edge (edge edge_in)
|
|||
rtx last = BB_END (edge_in->src);
|
||||
if (last
|
||||
&& JUMP_P (last)
|
||||
&& edge_in->dest != EXIT_BLOCK_PTR
|
||||
&& edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& extract_asm_operands (PATTERN (last)) != NULL_RTX
|
||||
&& patch_jump_insn (last, before, bb))
|
||||
df_set_bb_dirty (edge_in->src);
|
||||
|
@ -1943,7 +1950,7 @@ commit_one_edge_insertion (edge e)
|
|||
|
||||
/* Figure out where to put these insns. If the destination has
|
||||
one predecessor, insert there. Except for the exit block. */
|
||||
if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
|
||||
if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
bb = e->dest;
|
||||
|
||||
|
@ -1972,7 +1979,7 @@ commit_one_edge_insertion (edge e)
|
|||
the basic block. */
|
||||
else if ((e->flags & EDGE_ABNORMAL) == 0
|
||||
&& single_succ_p (e->src)
|
||||
&& e->src != ENTRY_BLOCK_PTR
|
||||
&& e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& (!JUMP_P (BB_END (e->src))
|
||||
|| simplejump_p (BB_END (e->src))))
|
||||
{
|
||||
|
@ -2025,7 +2032,7 @@ commit_one_edge_insertion (edge e)
|
|||
to EXIT. */
|
||||
|
||||
e = single_succ_edge (bb);
|
||||
gcc_assert (e->dest == EXIT_BLOCK_PTR
|
||||
gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
|
||||
|
||||
e->flags &= ~EDGE_FALLTHRU;
|
||||
|
@ -2057,7 +2064,8 @@ commit_edge_insertions (void)
|
|||
verify_flow_info ();
|
||||
#endif
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -2428,8 +2436,8 @@ rtl_verify_edges (void)
|
|||
n_fallthru++, fallthru = e;
|
||||
|
||||
is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
|
||||
&& e->src != ENTRY_BLOCK_PTR
|
||||
&& e->dest != EXIT_BLOCK_PTR);
|
||||
&& e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
has_crossing_edge |= is_crossing;
|
||||
if (e->flags & EDGE_CROSSING)
|
||||
{
|
||||
|
@ -2832,8 +2840,8 @@ rtl_verify_fallthru (void)
|
|||
break;
|
||||
}
|
||||
}
|
||||
else if (e->src != ENTRY_BLOCK_PTR
|
||||
&& e->dest != EXIT_BLOCK_PTR)
|
||||
else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
rtx insn;
|
||||
|
||||
|
@ -2872,10 +2880,10 @@ rtl_verify_bb_layout (void)
|
|||
rtx x;
|
||||
int num_bb_notes;
|
||||
const rtx rtx_first = get_insns ();
|
||||
basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
|
||||
basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
|
||||
|
||||
num_bb_notes = 0;
|
||||
last_bb_seen = ENTRY_BLOCK_PTR;
|
||||
last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
for (x = rtx_first; x; x = NEXT_INSN (x))
|
||||
{
|
||||
|
@ -3062,7 +3070,7 @@ purge_dead_edges (basic_block bb)
|
|||
ei_next (&ei);
|
||||
continue;
|
||||
}
|
||||
else if (e->dest != EXIT_BLOCK_PTR
|
||||
else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& BB_HEAD (e->dest) == JUMP_LABEL (insn))
|
||||
/* If the destination block is the target of the jump,
|
||||
keep the edge. */
|
||||
|
@ -3070,7 +3078,8 @@ purge_dead_edges (basic_block bb)
|
|||
ei_next (&ei);
|
||||
continue;
|
||||
}
|
||||
else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
|
||||
else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& returnjump_p (insn))
|
||||
/* If the destination block is the exit block, and this
|
||||
instruction is a return, then keep the edge. */
|
||||
{
|
||||
|
@ -3319,7 +3328,7 @@ skip_insns_after_block (basic_block bb)
|
|||
rtx insn, last_insn, next_head, prev;
|
||||
|
||||
next_head = NULL_RTX;
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
next_head = BB_HEAD (bb->next_bb);
|
||||
|
||||
for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
|
||||
|
@ -3468,7 +3477,7 @@ outof_cfg_layout_mode (void)
|
|||
basic_block bb;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bb->aux = bb->next_bb;
|
||||
|
||||
cfg_layout_finalize ();
|
||||
|
@ -3577,7 +3586,8 @@ relink_block_chain (bool stay_in_cfglayout_mode)
|
|||
if (dump_file)
|
||||
{
|
||||
fprintf (dump_file, "Reordered sequence:\n");
|
||||
for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
|
||||
for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
|
||||
NUM_FIXED_BLOCKS;
|
||||
bb;
|
||||
bb = (basic_block) bb->aux, index++)
|
||||
{
|
||||
|
@ -3595,15 +3605,15 @@ relink_block_chain (bool stay_in_cfglayout_mode)
|
|||
}
|
||||
|
||||
/* Now reorder the blocks. */
|
||||
prev_bb = ENTRY_BLOCK_PTR;
|
||||
bb = ENTRY_BLOCK_PTR->next_bb;
|
||||
prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
|
||||
for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
|
||||
{
|
||||
bb->prev_bb = prev_bb;
|
||||
prev_bb->next_bb = bb;
|
||||
}
|
||||
prev_bb->next_bb = EXIT_BLOCK_PTR;
|
||||
EXIT_BLOCK_PTR->prev_bb = prev_bb;
|
||||
prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
|
||||
|
||||
/* Then, clean up the aux fields. */
|
||||
FOR_ALL_BB (bb)
|
||||
|
@ -3644,7 +3654,8 @@ fixup_reorder_chain (void)
|
|||
/* First do the bulk reordering -- rechain the blocks without regard to
|
||||
the needed changes to jumps and labels. */
|
||||
|
||||
for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
|
||||
for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
|
||||
bb->aux)
|
||||
{
|
||||
if (BB_HEADER (bb))
|
||||
{
|
||||
|
@ -3687,7 +3698,8 @@ fixup_reorder_chain (void)
|
|||
/* Now add jumps and labels as needed to match the blocks new
|
||||
outgoing edges. */
|
||||
|
||||
for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
|
||||
for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
|
||||
bb->aux)
|
||||
{
|
||||
edge e_fall, e_taken, e;
|
||||
rtx bb_end_insn;
|
||||
|
@ -3728,7 +3740,7 @@ fixup_reorder_chain (void)
|
|||
|
||||
/* If the old fallthru is still next, nothing to do. */
|
||||
if (bb->aux == e_fall->dest
|
||||
|| e_fall->dest == EXIT_BLOCK_PTR)
|
||||
|| e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
/* The degenerated case of conditional jump jumping to the next
|
||||
|
@ -3749,7 +3761,8 @@ fixup_reorder_chain (void)
|
|||
if (note
|
||||
&& XINT (note, 0) < REG_BR_PROB_BASE / 2
|
||||
&& invert_jump (bb_end_insn,
|
||||
(e_fall->dest == EXIT_BLOCK_PTR
|
||||
(e_fall->dest
|
||||
== EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
? NULL_RTX
|
||||
: label_for_bb (e_fall->dest)), 0))
|
||||
{
|
||||
|
@ -3771,7 +3784,8 @@ fixup_reorder_chain (void)
|
|||
/* Otherwise we can try to invert the jump. This will
|
||||
basically never fail, however, keep up the pretense. */
|
||||
else if (invert_jump (bb_end_insn,
|
||||
(e_fall->dest == EXIT_BLOCK_PTR
|
||||
(e_fall->dest
|
||||
== EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
? NULL_RTX
|
||||
: label_for_bb (e_fall->dest)), 0))
|
||||
{
|
||||
|
@ -3793,7 +3807,7 @@ fixup_reorder_chain (void)
|
|||
__builtin_unreachable ()), nothing to do. */
|
||||
if (! e_fall
|
||||
|| bb->aux == e_fall->dest
|
||||
|| e_fall->dest == EXIT_BLOCK_PTR)
|
||||
|| e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
/* Otherwise we'll have to use the fallthru fixup below. */
|
||||
|
@ -3820,7 +3834,7 @@ fixup_reorder_chain (void)
|
|||
continue;
|
||||
|
||||
/* A fallthru to exit block. */
|
||||
if (e_fall->dest == EXIT_BLOCK_PTR)
|
||||
if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -3880,7 +3894,7 @@ fixup_reorder_chain (void)
|
|||
continue;
|
||||
}
|
||||
dest = e->dest;
|
||||
if (dest == EXIT_BLOCK_PTR)
|
||||
if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
/* Non-fallthru edges to the exit block cannot be split. */
|
||||
if (!(e->flags & EDGE_FALLTHRU))
|
||||
|
@ -3958,13 +3972,13 @@ fixup_fallthru_exit_predecessor (void)
|
|||
value. */
|
||||
gcc_assert (reload_completed);
|
||||
|
||||
e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
|
||||
e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
|
||||
if (e)
|
||||
bb = e->src;
|
||||
|
||||
if (bb && bb->aux)
|
||||
{
|
||||
basic_block c = ENTRY_BLOCK_PTR->next_bb;
|
||||
basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
|
||||
|
||||
/* If the very first block is the one with the fall-through exit
|
||||
edge, we have to split that block. */
|
||||
|
@ -4000,7 +4014,7 @@ force_one_exit_fallthru (void)
|
|||
edge_iterator ei;
|
||||
basic_block forwarder, bb;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
if (e->flags & EDGE_FALLTHRU)
|
||||
{
|
||||
if (predecessor == NULL)
|
||||
|
@ -4018,7 +4032,8 @@ force_one_exit_fallthru (void)
|
|||
/* Exit has several fallthru predecessors. Create a forwarder block for
|
||||
them. */
|
||||
forwarder = split_edge (predecessor);
|
||||
for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
|
||||
for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
|
||||
(e = ei_safe_edge (ei)); )
|
||||
{
|
||||
if (e->src == forwarder
|
||||
|| !(e->flags & EDGE_FALLTHRU))
|
||||
|
@ -4166,7 +4181,7 @@ cfg_layout_duplicate_bb (basic_block bb)
|
|||
insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
|
||||
new_bb = create_basic_block (insn,
|
||||
insn ? get_last_insn () : NULL,
|
||||
EXIT_BLOCK_PTR->prev_bb);
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
|
||||
|
||||
BB_COPY_PARTITION (new_bb, bb);
|
||||
if (BB_HEADER (bb))
|
||||
|
@ -4313,14 +4328,14 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
|
|||
if (e->dest == dest)
|
||||
return e;
|
||||
|
||||
if (e->src != ENTRY_BLOCK_PTR
|
||||
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& (ret = try_redirect_by_replacing_jump (e, dest, true)))
|
||||
{
|
||||
df_set_bb_dirty (src);
|
||||
return ret;
|
||||
}
|
||||
|
||||
if (e->src == ENTRY_BLOCK_PTR
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
|
||||
{
|
||||
if (dump_file)
|
||||
|
@ -4447,7 +4462,7 @@ cfg_layout_delete_block (basic_block bb)
|
|||
set_last_insn (insn);
|
||||
}
|
||||
}
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
to = &BB_HEADER (bb->next_bb);
|
||||
else
|
||||
to = &cfg_layout_function_footer;
|
||||
|
@ -4504,7 +4519,7 @@ cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
|
|||
if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
|
||||
{
|
||||
edge e = find_fallthru_edge (b->succs);
|
||||
if (e && e->dest == EXIT_BLOCK_PTR)
|
||||
if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -4515,7 +4530,8 @@ cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
|
|||
&& a != b
|
||||
/* Must be simple edge. */
|
||||
&& !(single_succ_edge (a)->flags & EDGE_COMPLEX)
|
||||
&& a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
|
||||
&& a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& b != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
/* If the jump insn has side effects, we can't kill the edge.
|
||||
When not optimizing, try_redirect_by_replacing_jump will
|
||||
not allow us to redirect an edge by replacing a table jump. */
|
||||
|
@ -4634,11 +4650,11 @@ static basic_block
|
|||
cfg_layout_split_edge (edge e)
|
||||
{
|
||||
basic_block new_bb =
|
||||
create_basic_block (e->src != ENTRY_BLOCK_PTR
|
||||
create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
? NEXT_INSN (BB_END (e->src)) : get_insns (),
|
||||
NULL_RTX, e->src);
|
||||
|
||||
if (e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
BB_COPY_PARTITION (new_bb, e->src);
|
||||
else
|
||||
BB_COPY_PARTITION (new_bb, e->dest);
|
||||
|
@ -4663,7 +4679,8 @@ rtl_block_empty_p (basic_block bb)
|
|||
{
|
||||
rtx insn;
|
||||
|
||||
if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
|
||||
if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return true;
|
||||
|
||||
FOR_BB_INSNS (bb, insn)
|
||||
|
@ -4770,7 +4787,8 @@ rtl_flow_call_edges_add (sbitmap blocks)
|
|||
if (! blocks)
|
||||
check_last_block = true;
|
||||
else
|
||||
check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
|
||||
check_last_block = bitmap_bit_p (blocks,
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
|
||||
|
||||
/* In the last basic block, before epilogue generation, there will be
|
||||
a fallthru edge to EXIT. Special care is required if the last insn
|
||||
|
@ -4786,7 +4804,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
|
|||
Handle this by adding a dummy instruction in a new last basic block. */
|
||||
if (check_last_block)
|
||||
{
|
||||
basic_block bb = EXIT_BLOCK_PTR->prev_bb;
|
||||
basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
|
||||
rtx insn = BB_END (bb);
|
||||
|
||||
/* Back up past insns that must be kept in the same block as a call. */
|
||||
|
@ -4798,7 +4816,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
|
|||
{
|
||||
edge e;
|
||||
|
||||
e = find_edge (bb, EXIT_BLOCK_PTR);
|
||||
e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
if (e)
|
||||
{
|
||||
insert_insn_on_edge (gen_use (const0_rtx), e);
|
||||
|
@ -4846,7 +4864,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
|
|||
#ifdef ENABLE_CHECKING
|
||||
if (split_at_insn == BB_END (bb))
|
||||
{
|
||||
e = find_edge (bb, EXIT_BLOCK_PTR);
|
||||
e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
gcc_assert (e == NULL);
|
||||
}
|
||||
#endif
|
||||
|
@ -4860,7 +4878,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
|
|||
blocks_split++;
|
||||
}
|
||||
|
||||
make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
|
||||
make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
|
||||
}
|
||||
|
||||
if (insn == BB_HEAD (bb))
|
||||
|
@ -4952,7 +4970,7 @@ rtl_can_remove_branch_p (const_edge e)
|
|||
const_rtx insn = BB_END (src), set;
|
||||
|
||||
/* The conditions are taken from try_redirect_by_replacing_jump. */
|
||||
if (target == EXIT_BLOCK_PTR)
|
||||
if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return false;
|
||||
|
||||
if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
|
||||
|
|
|
@ -198,7 +198,7 @@ record_eh_tables (struct cgraph_node *node, struct function *fun)
|
|||
int
|
||||
compute_call_stmt_bb_frequency (tree decl, basic_block bb)
|
||||
{
|
||||
int entry_freq = ENTRY_BLOCK_PTR_FOR_FUNCTION
|
||||
int entry_freq = ENTRY_BLOCK_PTR_FOR_FN
|
||||
(DECL_STRUCT_FUNCTION (decl))->frequency;
|
||||
int freq = bb->frequency;
|
||||
|
||||
|
@ -441,7 +441,7 @@ rebuild_cgraph_edges (void)
|
|||
cgraph_node_remove_callees (node);
|
||||
ipa_remove_all_references (&node->ref_list);
|
||||
|
||||
node->count = ENTRY_BLOCK_PTR->count;
|
||||
node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
{
|
||||
|
@ -493,7 +493,7 @@ cgraph_rebuild_references (void)
|
|||
else
|
||||
i++;
|
||||
|
||||
node->count = ENTRY_BLOCK_PTR->count;
|
||||
node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
{
|
||||
|
|
|
@ -1336,10 +1336,10 @@ init_lowered_empty_function (tree decl, bool in_ssa)
|
|||
loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
|
||||
|
||||
/* Create BB for body of the function and connect it properly. */
|
||||
bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
|
||||
make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
|
||||
make_edge (bb, EXIT_BLOCK_PTR, 0);
|
||||
add_bb_to_loop (bb, ENTRY_BLOCK_PTR->loop_father);
|
||||
bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
|
||||
make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
|
||||
add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
|
||||
|
||||
return bb;
|
||||
}
|
||||
|
@ -1627,7 +1627,7 @@ expand_thunk (struct cgraph_node *node, bool output_asm_thunks)
|
|||
gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
|
||||
make_edge (bb, then_bb, EDGE_TRUE_VALUE);
|
||||
make_edge (bb, else_bb, EDGE_FALSE_VALUE);
|
||||
make_edge (return_bb, EXIT_BLOCK_PTR, 0);
|
||||
make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
|
||||
make_edge (then_bb, return_bb, EDGE_FALLTHRU);
|
||||
make_edge (else_bb, return_bb, EDGE_FALLTHRU);
|
||||
bsi = gsi_last_bb (then_bb);
|
||||
|
|
|
@ -1157,7 +1157,7 @@ combine_instructions (rtx f, unsigned int nregs)
|
|||
setup_incoming_promotions (first);
|
||||
/* Allow the entry block and the first block to fall into the same EBB.
|
||||
Conceptually the incoming promotions are assigned to the entry block. */
|
||||
last_bb = ENTRY_BLOCK_PTR;
|
||||
last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
create_log_links ();
|
||||
FOR_EACH_BB (this_basic_block)
|
||||
|
@ -1209,7 +1209,7 @@ combine_instructions (rtx f, unsigned int nregs)
|
|||
label_tick = label_tick_ebb_start = 1;
|
||||
init_reg_last ();
|
||||
setup_incoming_promotions (first);
|
||||
last_bb = ENTRY_BLOCK_PTR;
|
||||
last_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
FOR_EACH_BB (this_basic_block)
|
||||
{
|
||||
|
@ -1592,7 +1592,7 @@ set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
|
|||
/* If this register is undefined at the start of the file, we can't
|
||||
say what its contents were. */
|
||||
&& ! REGNO_REG_SET_P
|
||||
(DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
|
||||
(DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), REGNO (x))
|
||||
&& HWI_COMPUTABLE_MODE_P (GET_MODE (x)))
|
||||
{
|
||||
reg_stat_type *rsp = ®_stat[REGNO (x)];
|
||||
|
@ -3938,7 +3938,7 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
|
|||
ni2dest = SET_DEST (newi2pat);
|
||||
|
||||
for (insn = NEXT_INSN (i3);
|
||||
insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
|
||||
insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| insn != BB_HEAD (this_basic_block->next_bb));
|
||||
insn = NEXT_INSN (insn))
|
||||
{
|
||||
|
@ -4054,7 +4054,8 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
|
|||
&& ! find_reg_note (i2, REG_UNUSED,
|
||||
SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
|
||||
for (temp = NEXT_INSN (i2);
|
||||
temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
|
||||
temp
|
||||
&& (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| BB_HEAD (this_basic_block) != temp);
|
||||
temp = NEXT_INSN (temp))
|
||||
if (temp != i3 && INSN_P (temp))
|
||||
|
@ -9468,7 +9469,8 @@ reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
|
|||
|| (REGNO (x) >= FIRST_PSEUDO_REGISTER
|
||||
&& REG_N_SETS (REGNO (x)) == 1
|
||||
&& !REGNO_REG_SET_P
|
||||
(DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
|
||||
(DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
|
||||
REGNO (x)))))
|
||||
{
|
||||
*nonzero &= rsp->last_set_nonzero_bits;
|
||||
return NULL;
|
||||
|
@ -9535,7 +9537,8 @@ reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
|
|||
|| (REGNO (x) >= FIRST_PSEUDO_REGISTER
|
||||
&& REG_N_SETS (REGNO (x)) == 1
|
||||
&& !REGNO_REG_SET_P
|
||||
(DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
|
||||
(DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
|
||||
REGNO (x)))))
|
||||
{
|
||||
*result = rsp->last_set_sign_bit_copies;
|
||||
return NULL;
|
||||
|
@ -12564,7 +12567,8 @@ get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
|
|||
|| (! (regno >= FIRST_PSEUDO_REGISTER
|
||||
&& REG_N_SETS (regno) == 1
|
||||
&& (!REGNO_REG_SET_P
|
||||
(DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno)))
|
||||
(DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb),
|
||||
regno)))
|
||||
&& rsp->last_set_label > tick))
|
||||
{
|
||||
if (replace)
|
||||
|
@ -12679,7 +12683,7 @@ get_last_value (const_rtx x)
|
|||
&& (regno < FIRST_PSEUDO_REGISTER
|
||||
|| REG_N_SETS (regno) != 1
|
||||
|| REGNO_REG_SET_P
|
||||
(DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno))))
|
||||
(DF_LR_IN (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb), regno))))
|
||||
return 0;
|
||||
|
||||
/* If the value was set in a later insn than the ones we are processing,
|
||||
|
@ -13740,7 +13744,7 @@ distribute_links (struct insn_link *links)
|
|||
since most links don't point very far away. */
|
||||
|
||||
for (insn = NEXT_INSN (link->insn);
|
||||
(insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
|
||||
(insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| BB_HEAD (this_basic_block->next_bb) != insn));
|
||||
insn = NEXT_INSN (insn))
|
||||
if (DEBUG_INSN_P (insn))
|
||||
|
|
|
@ -4835,7 +4835,8 @@ alpha_gp_save_rtx (void)
|
|||
label. Emit the sequence properly on the edge. We are only
|
||||
invoked from dw2_build_landing_pads and finish_eh_generation
|
||||
will call commit_edge_insertions thanks to a kludge. */
|
||||
insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
|
||||
insert_insn_on_edge (seq,
|
||||
single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
|
||||
cfun->machine->gp_save_rtx = m;
|
||||
}
|
||||
|
|
|
@ -5943,7 +5943,8 @@ require_pic_register (void)
|
|||
we can't yet emit instructions directly in the final
|
||||
insn stream. Queue the insns on the entry edge, they will
|
||||
be committed after everything else is expanded. */
|
||||
insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
|
||||
insert_insn_on_edge (seq,
|
||||
single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -18386,7 +18387,8 @@ arm_r3_live_at_start_p (void)
|
|||
/* Just look at cfg info, which is still close enough to correct at this
|
||||
point. This gives false positives for broken functions that might use
|
||||
uninitialized data that happens to be allocated in r3, but who cares? */
|
||||
return REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), 3);
|
||||
return REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
|
||||
3);
|
||||
}
|
||||
|
||||
/* Compute the number of bytes used to store the static chain register on the
|
||||
|
@ -19919,7 +19921,7 @@ any_sibcall_could_use_r3 (void)
|
|||
|
||||
if (!crtl->tail_call_emit)
|
||||
return false;
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
if (e->flags & EDGE_SIBCALL)
|
||||
{
|
||||
rtx call = BB_END (e->src);
|
||||
|
|
|
@ -3600,7 +3600,7 @@ hwloop_optimize (hwloop_info loop)
|
|||
|
||||
if (single_pred_p (bb)
|
||||
&& single_pred_edge (bb)->flags & EDGE_FALLTHRU
|
||||
&& single_pred (bb) != ENTRY_BLOCK_PTR)
|
||||
&& single_pred (bb) != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
bb = single_pred (bb);
|
||||
last_insn = BB_END (bb);
|
||||
|
|
|
@ -8027,7 +8027,7 @@ frv_optimize_membar_global (basic_block bb, struct frv_io *first_io,
|
|||
/* We need to keep the membar if there is an edge to the exit block. */
|
||||
FOR_EACH_EDGE (succ, ei, bb->succs)
|
||||
/* for (succ = bb->succ; succ != 0; succ = succ->succ_next) */
|
||||
if (succ->dest == EXIT_BLOCK_PTR)
|
||||
if (succ->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return;
|
||||
|
||||
/* Work out the union of all successor blocks. */
|
||||
|
|
|
@ -5593,7 +5593,7 @@ ix86_eax_live_at_start_p (void)
|
|||
to correct at this point. This gives false positives for broken
|
||||
functions that might use uninitialized data that happens to be
|
||||
allocated in eax, but who cares? */
|
||||
return REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), 0);
|
||||
return REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)), 0);
|
||||
}
|
||||
|
||||
static bool
|
||||
|
@ -9301,7 +9301,7 @@ ix86_compute_frame_layout (struct ix86_frame *frame)
|
|||
Recompute the value as needed. Do not recompute when amount of registers
|
||||
didn't change as reload does multiple calls to the function and does not
|
||||
expect the decision to change within single iteration. */
|
||||
else if (!optimize_bb_for_size_p (ENTRY_BLOCK_PTR)
|
||||
else if (!optimize_bb_for_size_p (ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
&& cfun->machine->use_fast_prologue_epilogue_nregs != frame->nregs)
|
||||
{
|
||||
int count = frame->nregs;
|
||||
|
@ -11390,7 +11390,7 @@ ix86_expand_epilogue (int style)
|
|||
/* Leave results in shorter dependency chains on CPUs that are
|
||||
able to grok it fast. */
|
||||
else if (TARGET_USE_LEAVE
|
||||
|| optimize_bb_for_size_p (EXIT_BLOCK_PTR)
|
||||
|| optimize_bb_for_size_p (EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
|| !cfun->machine->use_fast_prologue_epilogue)
|
||||
ix86_emit_leave ();
|
||||
else
|
||||
|
@ -29838,7 +29838,7 @@ add_condition_to_bb (tree function_decl, tree version_decl,
|
|||
make_edge (bb1, bb3, EDGE_FALSE_VALUE);
|
||||
|
||||
remove_edge (e23);
|
||||
make_edge (bb2, EXIT_BLOCK_PTR, 0);
|
||||
make_edge (bb2, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
|
||||
|
||||
pop_cfun ();
|
||||
|
||||
|
@ -36573,7 +36573,7 @@ ix86_pad_returns (void)
|
|||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
{
|
||||
basic_block bb = e->src;
|
||||
rtx ret = BB_END (bb);
|
||||
|
@ -36673,14 +36673,14 @@ ix86_count_insn (basic_block bb)
|
|||
edge prev_e;
|
||||
edge_iterator prev_ei;
|
||||
|
||||
if (e->src == ENTRY_BLOCK_PTR)
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
min_prev_count = 0;
|
||||
break;
|
||||
}
|
||||
FOR_EACH_EDGE (prev_e, prev_ei, e->src->preds)
|
||||
{
|
||||
if (prev_e->src == ENTRY_BLOCK_PTR)
|
||||
if (prev_e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
int count = ix86_count_insn_bb (e->src);
|
||||
if (count < min_prev_count)
|
||||
|
@ -36704,7 +36704,7 @@ ix86_pad_short_function (void)
|
|||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
{
|
||||
rtx ret = BB_END (e->src);
|
||||
if (JUMP_P (ret) && ANY_RETURN_P (PATTERN (ret)))
|
||||
|
@ -36744,7 +36744,7 @@ ix86_seh_fixup_eh_fallthru (void)
|
|||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
{
|
||||
rtx insn, next;
|
||||
|
||||
|
|
|
@ -3492,7 +3492,7 @@ ia64_expand_prologue (void)
|
|||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
if ((e->flags & EDGE_FAKE) == 0
|
||||
&& (e->flags & EDGE_FALLTHRU) != 0)
|
||||
break;
|
||||
|
@ -10187,7 +10187,8 @@ ia64_asm_unwind_emit (FILE *asm_out_file, rtx insn)
|
|||
|
||||
if (NOTE_INSN_BASIC_BLOCK_P (insn))
|
||||
{
|
||||
last_block = NOTE_BASIC_BLOCK (insn)->next_bb == EXIT_BLOCK_PTR;
|
||||
last_block = NOTE_BASIC_BLOCK (insn)->next_bb
|
||||
== EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
/* Restore unwind state from immediately before the epilogue. */
|
||||
if (need_copy_state)
|
||||
|
|
|
@ -4566,7 +4566,7 @@ nds32_fp_as_gp_check_available (void)
|
|||
|| frame_pointer_needed
|
||||
|| NDS32_REQUIRED_CALLEE_SAVED_P (FP_REGNUM)
|
||||
|| (cfun->stdarg == 1)
|
||||
|| (find_fallthru_edge (EXIT_BLOCK_PTR->preds) == NULL))
|
||||
|| (find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == NULL))
|
||||
return 0;
|
||||
|
||||
/* Now we can check the possibility of using fp_as_gp optimization. */
|
||||
|
|
|
@ -22953,7 +22953,7 @@ rs6000_emit_prologue (void)
|
|||
&& DEFAULT_ABI == ABI_V4
|
||||
&& flag_pic
|
||||
&& ! info->lr_save_p
|
||||
&& EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0);
|
||||
&& EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0);
|
||||
if (save_LR_around_toc_setup)
|
||||
{
|
||||
rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
|
||||
|
|
15
gcc/cprop.c
15
gcc/cprop.c
|
@ -967,7 +967,7 @@ cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
|
|||
edge_iterator ei;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& BB_HEAD (e->dest) == JUMP_LABEL (jump))
|
||||
{
|
||||
e->flags |= EDGE_FALLTHRU;
|
||||
|
@ -1376,7 +1376,7 @@ find_implicit_sets (void)
|
|||
? BRANCH_EDGE (bb)->dest : FALLTHRU_EDGE (bb)->dest;
|
||||
|
||||
/* If DEST doesn't go anywhere, ignore it. */
|
||||
if (! dest || dest == EXIT_BLOCK_PTR)
|
||||
if (! dest || dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
/* We have found a suitable implicit set. Try to record it now as
|
||||
|
@ -1612,7 +1612,7 @@ bypass_block (basic_block bb, rtx setcc, rtx jump)
|
|||
old_dest = e->dest;
|
||||
if (dest != NULL
|
||||
&& dest != old_dest
|
||||
&& dest != EXIT_BLOCK_PTR)
|
||||
&& dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
redirect_edge_and_branch_force (e, dest);
|
||||
|
||||
|
@ -1664,15 +1664,15 @@ bypass_conditional_jumps (void)
|
|||
rtx dest;
|
||||
|
||||
/* Note we start at block 1. */
|
||||
if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
|
||||
if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return 0;
|
||||
|
||||
bypass_last_basic_block = last_basic_block;
|
||||
mark_dfs_back_edges ();
|
||||
|
||||
changed = 0;
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
|
||||
EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
/* Check for more than one predecessor. */
|
||||
if (!single_pred_p (bb))
|
||||
|
@ -1836,7 +1836,8 @@ one_cprop_pass (void)
|
|||
/* Allocate vars to track sets of regs. */
|
||||
reg_set_bitmap = ALLOC_REG_SET (NULL);
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR,
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun),
|
||||
next_bb)
|
||||
{
|
||||
/* Reset tables used to keep track of what's still valid [since
|
||||
|
|
|
@ -6200,7 +6200,7 @@ cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
|
|||
&& e == BRANCH_EDGE (previous_bb_in_path))
|
||||
{
|
||||
bb = FALLTHRU_EDGE (previous_bb_in_path)->dest;
|
||||
if (bb != EXIT_BLOCK_PTR
|
||||
if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& single_pred_p (bb)
|
||||
/* We used to assert here that we would only see blocks
|
||||
that we have not visited yet. But we may end up
|
||||
|
@ -6254,7 +6254,7 @@ cse_find_path (basic_block first_bb, struct cse_basic_block_data *data,
|
|||
|
||||
if (e
|
||||
&& !((e->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
|
||||
&& e->dest != EXIT_BLOCK_PTR
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& single_pred_p (e->dest)
|
||||
/* Avoid visiting basic blocks twice. The large comment
|
||||
above explains why this can happen. */
|
||||
|
@ -7166,7 +7166,7 @@ cse_cc_succs (basic_block bb, basic_block orig_bb, rtx cc_reg, rtx cc_src,
|
|||
continue;
|
||||
|
||||
if (EDGE_COUNT (e->dest->preds) != 1
|
||||
|| e->dest == EXIT_BLOCK_PTR
|
||||
|| e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
/* Avoid endless recursion on unreachable blocks. */
|
||||
|| e->dest == orig_bb)
|
||||
continue;
|
||||
|
|
|
@ -1007,7 +1007,7 @@ static void
|
|||
df_lr_confluence_0 (basic_block bb)
|
||||
{
|
||||
bitmap op1 = &df_lr_get_bb_info (bb->index)->out;
|
||||
if (bb != EXIT_BLOCK_PTR)
|
||||
if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_copy (op1, &df->hardware_regs_used);
|
||||
}
|
||||
|
||||
|
|
|
@ -3873,7 +3873,7 @@ df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
|
|||
EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
|
||||
{
|
||||
df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
|
||||
ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_DEF, 0);
|
||||
}
|
||||
|
||||
df_canonize_collection_rec (collection_rec);
|
||||
|
@ -4034,17 +4034,17 @@ df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exi
|
|||
|
||||
EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
|
||||
df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
|
||||
EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
|
||||
|
||||
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
|
||||
/* It is deliberate that this is not put in the exit block uses but
|
||||
I do not know why. */
|
||||
if (reload_completed
|
||||
&& !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
|
||||
&& bb_has_eh_pred (EXIT_BLOCK_PTR)
|
||||
&& bb_has_eh_pred (EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
&& fixed_regs[ARG_POINTER_REGNUM])
|
||||
df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
|
||||
EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
|
||||
#endif
|
||||
|
||||
df_canonize_collection_rec (collection_rec);
|
||||
|
|
|
@ -240,14 +240,14 @@ calc_dfs_tree_nonrec (struct dom_info *di, basic_block bb, bool reverse)
|
|||
if (reverse)
|
||||
{
|
||||
ei = ei_start (bb->preds);
|
||||
en_block = EXIT_BLOCK_PTR;
|
||||
ex_block = ENTRY_BLOCK_PTR;
|
||||
en_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
ex_block = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
}
|
||||
else
|
||||
{
|
||||
ei = ei_start (bb->succs);
|
||||
en_block = ENTRY_BLOCK_PTR;
|
||||
ex_block = EXIT_BLOCK_PTR;
|
||||
en_block = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
ex_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
}
|
||||
|
||||
/* When the stack is empty we break out of this loop. */
|
||||
|
@ -333,7 +333,8 @@ static void
|
|||
calc_dfs_tree (struct dom_info *di, bool reverse)
|
||||
{
|
||||
/* The first block is the ENTRY_BLOCK (or EXIT_BLOCK if REVERSE). */
|
||||
basic_block begin = reverse ? EXIT_BLOCK_PTR : ENTRY_BLOCK_PTR;
|
||||
basic_block begin = (reverse
|
||||
? EXIT_BLOCK_PTR_FOR_FN (cfun) : ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
di->dfs_order[last_basic_block] = di->dfsnum;
|
||||
di->dfs_to_bb[di->dfsnum] = begin;
|
||||
di->dfsnum++;
|
||||
|
@ -501,9 +502,9 @@ calc_idoms (struct dom_info *di, bool reverse)
|
|||
edge_iterator ei, einext;
|
||||
|
||||
if (reverse)
|
||||
en_block = EXIT_BLOCK_PTR;
|
||||
en_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
else
|
||||
en_block = ENTRY_BLOCK_PTR;
|
||||
en_block = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
/* Go backwards in DFS order, to first look at the leafs. */
|
||||
v = di->nodes;
|
||||
|
@ -1097,7 +1098,7 @@ prune_bbs_to_update_dominators (vec<basic_block> bbs,
|
|||
|
||||
for (i = 0; bbs.iterate (i, &bb);)
|
||||
{
|
||||
if (bb == ENTRY_BLOCK_PTR)
|
||||
if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
goto succeed;
|
||||
|
||||
if (single_pred_p (bb))
|
||||
|
@ -1171,7 +1172,7 @@ determine_dominators_for_sons (struct graph *g, vec<basic_block> bbs,
|
|||
if (son[y] == -1)
|
||||
return;
|
||||
if (y == (int) bbs.length ())
|
||||
ybb = ENTRY_BLOCK_PTR;
|
||||
ybb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
else
|
||||
ybb = bbs[y];
|
||||
|
||||
|
@ -1344,7 +1345,7 @@ iterate_fix_dominators (enum cdi_direction dir, vec<basic_block> bbs,
|
|||
set_immediate_dominator (CDI_DOMINATORS, bb, NULL);
|
||||
*map->insert (bb) = i;
|
||||
}
|
||||
*map->insert (ENTRY_BLOCK_PTR) = n;
|
||||
*map->insert (ENTRY_BLOCK_PTR_FOR_FN (cfun)) = n;
|
||||
|
||||
g = new_graph (n + 1);
|
||||
for (y = 0; y < g->n_vertices; y++)
|
||||
|
|
|
@ -169,8 +169,8 @@ dom_walker::walk (basic_block bb)
|
|||
{
|
||||
/* Don't worry about unreachable blocks. */
|
||||
if (EDGE_COUNT (bb->preds) > 0
|
||||
|| bb == ENTRY_BLOCK_PTR
|
||||
|| bb == EXIT_BLOCK_PTR)
|
||||
|| bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
/* Callback for subclasses to do custom things before we have walked
|
||||
the dominator children, but before we walk statements. */
|
||||
|
|
|
@ -2751,7 +2751,7 @@ dse_step1 (void)
|
|||
if (stores_off_frame_dead_at_return
|
||||
&& (EDGE_COUNT (bb->succs) == 0
|
||||
|| (single_succ_p (bb)
|
||||
&& single_succ (bb) == EXIT_BLOCK_PTR
|
||||
&& single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& ! crtl->calls_eh_return)))
|
||||
{
|
||||
insn_info_t i_ptr = active_local_stores;
|
||||
|
|
|
@ -1241,7 +1241,7 @@ sjlj_emit_function_enter (rtx dispatch_label)
|
|||
}
|
||||
|
||||
if (fn_begin_outside_block)
|
||||
insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
|
||||
insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
else
|
||||
emit_insn_after (seq, fn_begin);
|
||||
}
|
||||
|
@ -1509,7 +1509,7 @@ finish_eh_generation (void)
|
|||
|
||||
if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
|
||||
/* Kludge for Alpha (see alpha_gp_save_rtx). */
|
||||
|| single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
|
||||
|| single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
|
||||
commit_edge_insertions ();
|
||||
|
||||
/* Redirect all EH edges from the post_landing_pad to the landing pad. */
|
||||
|
|
|
@ -762,7 +762,7 @@ compute_alignments (void)
|
|||
&& (branch_frequency > freq_threshold
|
||||
|| (bb->frequency > bb->prev_bb->frequency * 10
|
||||
&& (bb->prev_bb->frequency
|
||||
<= ENTRY_BLOCK_PTR->frequency / 2))))
|
||||
<= ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency / 2))))
|
||||
{
|
||||
log = JUMP_ALIGN (label);
|
||||
if (dump_file)
|
||||
|
|
|
@ -3978,7 +3978,8 @@ regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
|
|||
return false;
|
||||
|
||||
return ((REG_N_SETS (regno) > 1
|
||||
|| REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR), regno))
|
||||
|| REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
|
||||
regno))
|
||||
&& REGNO_REG_SET_P (setjmp_crosses, regno));
|
||||
}
|
||||
|
||||
|
@ -5400,7 +5401,7 @@ next_block_for_reg (basic_block bb, int regno, int end_regno)
|
|||
|
||||
/* We can sometimes encounter dead code. Don't try to move it
|
||||
into the exit block. */
|
||||
if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR)
|
||||
if (!live_edge || live_edge->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return NULL;
|
||||
|
||||
/* Reject targets of abnormal edges. This is needed for correctness
|
||||
|
@ -5725,7 +5726,7 @@ convert_jumps_to_returns (basic_block last_bb, bool simple_p,
|
|||
|
||||
src_bbs.create (EDGE_COUNT (last_bb->preds));
|
||||
FOR_EACH_EDGE (e, ei, last_bb->preds)
|
||||
if (e->src != ENTRY_BLOCK_PTR)
|
||||
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
src_bbs.quick_push (e->src);
|
||||
|
||||
label = BB_HEAD (last_bb);
|
||||
|
@ -5805,7 +5806,7 @@ convert_jumps_to_returns (basic_block last_bb, bool simple_p,
|
|||
}
|
||||
|
||||
/* Fix up the CFG for the successful change we just made. */
|
||||
redirect_edge_succ (e, EXIT_BLOCK_PTR);
|
||||
redirect_edge_succ (e, EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
e->flags &= ~EDGE_CROSSING;
|
||||
}
|
||||
src_bbs.release ();
|
||||
|
@ -5897,7 +5898,7 @@ thread_prologue_and_epilogue_insns (void)
|
|||
|
||||
df_analyze ();
|
||||
|
||||
rtl_profile_for_bb (ENTRY_BLOCK_PTR);
|
||||
rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
inserted = false;
|
||||
seq = NULL_RTX;
|
||||
|
@ -5907,8 +5908,8 @@ thread_prologue_and_epilogue_insns (void)
|
|||
/* Can't deal with multiple successors of the entry block at the
|
||||
moment. Function should always have at least one entry
|
||||
point. */
|
||||
gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
|
||||
entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
|
||||
gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
orig_entry_edge = entry_edge;
|
||||
|
||||
split_prologue_seq = NULL_RTX;
|
||||
|
@ -6081,7 +6082,7 @@ thread_prologue_and_epilogue_insns (void)
|
|||
basic_block tmp_bb = vec.pop ();
|
||||
|
||||
FOR_EACH_EDGE (e, ei, tmp_bb->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& bitmap_set_bit (&bb_flags, e->dest->index))
|
||||
vec.quick_push (e->dest);
|
||||
}
|
||||
|
@ -6089,7 +6090,7 @@ thread_prologue_and_epilogue_insns (void)
|
|||
/* Find the set of basic blocks that need no prologue, have a
|
||||
single successor, can be duplicated, meet a max size
|
||||
requirement, and go to the exit via like blocks. */
|
||||
vec.quick_push (EXIT_BLOCK_PTR);
|
||||
vec.quick_push (EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
while (!vec.is_empty ())
|
||||
{
|
||||
basic_block tmp_bb = vec.pop ();
|
||||
|
@ -6266,7 +6267,7 @@ thread_prologue_and_epilogue_insns (void)
|
|||
{
|
||||
/* Otherwise put the copy at the end of the function. */
|
||||
copy_bb = create_basic_block (NULL_RTX, NULL_RTX,
|
||||
EXIT_BLOCK_PTR->prev_bb);
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
|
||||
BB_COPY_PARTITION (copy_bb, bb);
|
||||
}
|
||||
|
||||
|
@ -6280,7 +6281,7 @@ thread_prologue_and_epilogue_insns (void)
|
|||
dup_block_and_redirect (tbb, copy_bb, insert_point,
|
||||
&bb_flags);
|
||||
tbb = single_succ (tbb);
|
||||
if (tbb == EXIT_BLOCK_PTR)
|
||||
if (tbb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
break;
|
||||
e = split_block (copy_bb, PREV_INSN (insert_point));
|
||||
copy_bb = e->dest;
|
||||
|
@ -6294,7 +6295,8 @@ thread_prologue_and_epilogue_insns (void)
|
|||
if (CALL_P (PREV_INSN (insert_point))
|
||||
&& SIBLING_CALL_P (PREV_INSN (insert_point)))
|
||||
eflags = EDGE_SIBCALL | EDGE_ABNORMAL;
|
||||
make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR, eflags);
|
||||
make_single_succ_edge (copy_bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
|
||||
eflags);
|
||||
|
||||
/* verify_flow_info doesn't like a note after a
|
||||
sibling call. */
|
||||
|
@ -6325,15 +6327,15 @@ thread_prologue_and_epilogue_insns (void)
|
|||
|
||||
/* If the exit block has no non-fake predecessors, we don't need
|
||||
an epilogue. */
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
if ((e->flags & EDGE_FAKE) == 0)
|
||||
break;
|
||||
if (e == NULL)
|
||||
goto epilogue_done;
|
||||
|
||||
rtl_profile_for_bb (EXIT_BLOCK_PTR);
|
||||
rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
|
||||
exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
|
||||
|
||||
/* If we're allowed to generate a simple return instruction, then by
|
||||
definition we don't need a full epilogue. If the last basic
|
||||
|
@ -6349,10 +6351,10 @@ thread_prologue_and_epilogue_insns (void)
|
|||
|
||||
/* convert_jumps_to_returns may add to EXIT_BLOCK_PTR->preds
|
||||
(but won't remove). Stop at end of current preds. */
|
||||
last = EDGE_COUNT (EXIT_BLOCK_PTR->preds);
|
||||
last = EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
|
||||
for (i = 0; i < last; i++)
|
||||
{
|
||||
e = EDGE_I (EXIT_BLOCK_PTR->preds, i);
|
||||
e = EDGE_I (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds, i);
|
||||
if (LABEL_P (BB_HEAD (e->src))
|
||||
&& !bitmap_bit_p (&bb_flags, e->src->index)
|
||||
&& !active_insn_between (BB_HEAD (e->src), BB_END (e->src)))
|
||||
|
@ -6416,7 +6418,7 @@ thread_prologue_and_epilogue_insns (void)
|
|||
code. In order to be able to properly annotate these with unwind
|
||||
info, try to split them now. If we get a valid split, drop an
|
||||
EPILOGUE_BEG note and mark the insns as epilogue insns. */
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
{
|
||||
rtx prev, last, trial;
|
||||
|
||||
|
@ -6507,7 +6509,7 @@ epilogue_done:
|
|||
|
||||
/* The epilogue insns we inserted may cause the exit edge to no longer
|
||||
be fallthru. */
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
{
|
||||
if (((e->flags & EDGE_FALLTHRU) != 0)
|
||||
&& returnjump_p (BB_END (e->src)))
|
||||
|
@ -6544,7 +6546,7 @@ epilogue_done:
|
|||
}
|
||||
|
||||
/* Also check returns we might need to add to tail blocks. */
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
if (EDGE_COUNT (e->src->preds) != 0
|
||||
&& (e->flags & EDGE_FAKE) != 0
|
||||
&& !bitmap_bit_p (&bb_flags, e->src->index))
|
||||
|
@ -6559,7 +6561,7 @@ epilogue_done:
|
|||
inserting new BBs at the end of the function. Do this
|
||||
after the call to split_block above which may split
|
||||
the original exit pred. */
|
||||
exit_pred = EXIT_BLOCK_PTR->prev_bb;
|
||||
exit_pred = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
|
||||
|
||||
FOR_EACH_VEC_ELT (unconverted_simple_returns, i, e)
|
||||
{
|
||||
|
@ -6596,7 +6598,7 @@ epilogue_done:
|
|||
emit_barrier_after (start);
|
||||
|
||||
*pdest_bb = bb;
|
||||
make_edge (bb, EXIT_BLOCK_PTR, 0);
|
||||
make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
|
||||
}
|
||||
redirect_edge_and_branch_force (e, *pdest_bb);
|
||||
}
|
||||
|
@ -6605,7 +6607,7 @@ epilogue_done:
|
|||
|
||||
if (entry_edge != orig_entry_edge)
|
||||
{
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
if (EDGE_COUNT (e->src->preds) != 0
|
||||
&& (e->flags & EDGE_FAKE) != 0
|
||||
&& !bitmap_bit_p (&bb_flags, e->src->index))
|
||||
|
@ -6618,7 +6620,9 @@ epilogue_done:
|
|||
|
||||
#ifdef HAVE_sibcall_epilogue
|
||||
/* Emit sibling epilogues before any sibling call sites. */
|
||||
for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
|
||||
for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds); (e =
|
||||
ei_safe_edge (ei));
|
||||
)
|
||||
{
|
||||
basic_block bb = e->src;
|
||||
rtx insn = BB_END (bb);
|
||||
|
@ -6749,7 +6753,7 @@ reposition_prologue_and_epilogue_notes (void)
|
|||
edge_iterator ei;
|
||||
edge e;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
{
|
||||
rtx insn, first = NULL, note = NULL;
|
||||
basic_block bb = e->src;
|
||||
|
|
16
gcc/gcse.c
16
gcc/gcse.c
|
@ -2063,7 +2063,7 @@ pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr,
|
|||
{
|
||||
basic_block pred_bb = pred->src;
|
||||
|
||||
if (pred->src == ENTRY_BLOCK_PTR
|
||||
if (pred->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
/* Has predecessor has already been visited? */
|
||||
|| visited[pred_bb->index])
|
||||
;/* Nothing to do. */
|
||||
|
@ -2830,7 +2830,7 @@ compute_code_hoist_vbeinout (void)
|
|||
the convergence. */
|
||||
FOR_EACH_BB_REVERSE (bb)
|
||||
{
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
bitmap_intersection_of_succs (hoist_vbeout[bb->index],
|
||||
hoist_vbein, bb);
|
||||
|
@ -2908,7 +2908,7 @@ update_bb_reg_pressure (basic_block bb, rtx from)
|
|||
FOR_EACH_EDGE (succ, ei, bb->succs)
|
||||
{
|
||||
succ_bb = succ->dest;
|
||||
if (succ_bb == EXIT_BLOCK_PTR)
|
||||
if (succ_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
if (bitmap_bit_p (BB_DATA (succ_bb)->live_in, REGNO (dreg)))
|
||||
|
@ -3041,7 +3041,7 @@ should_hoist_expr_to_dom (basic_block expr_bb, struct expr *expr,
|
|||
{
|
||||
basic_block pred_bb = pred->src;
|
||||
|
||||
if (pred->src == ENTRY_BLOCK_PTR)
|
||||
if (pred->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
break;
|
||||
else if (pred_bb == expr_bb)
|
||||
continue;
|
||||
|
@ -3185,16 +3185,16 @@ hoist_code (void)
|
|||
bb_size[bb->index] = to_head;
|
||||
}
|
||||
|
||||
gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1
|
||||
&& (EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
|
||||
== ENTRY_BLOCK_PTR->next_bb));
|
||||
gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1
|
||||
&& (EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0)->dest
|
||||
== ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb));
|
||||
|
||||
from_bbs = BITMAP_ALLOC (NULL);
|
||||
if (flag_ira_hoist_pressure)
|
||||
hoisted_bbs = BITMAP_ALLOC (NULL);
|
||||
|
||||
dom_tree_walk = get_all_dominated_blocks (CDI_DOMINATORS,
|
||||
ENTRY_BLOCK_PTR->next_bb);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb);
|
||||
|
||||
/* Walk over each basic block looking for potentially hoistable
|
||||
expressions, nothing gets hoisted from the entry block. */
|
||||
|
|
|
@ -713,7 +713,7 @@ gimple_find_edge_insert_loc (edge e, gimple_stmt_iterator *gsi,
|
|||
restart:
|
||||
if (single_pred_p (dest)
|
||||
&& gimple_seq_empty_p (phi_nodes (dest))
|
||||
&& dest != EXIT_BLOCK_PTR)
|
||||
&& dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
*gsi = gsi_start_bb (dest);
|
||||
if (gsi_end_p (*gsi))
|
||||
|
@ -744,7 +744,7 @@ gimple_find_edge_insert_loc (edge e, gimple_stmt_iterator *gsi,
|
|||
src = e->src;
|
||||
if ((e->flags & EDGE_ABNORMAL) == 0
|
||||
&& single_succ_p (src)
|
||||
&& src != ENTRY_BLOCK_PTR)
|
||||
&& src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
*gsi = gsi_last_bb (src);
|
||||
if (gsi_end_p (*gsi))
|
||||
|
@ -830,7 +830,8 @@ gsi_commit_edge_inserts (void)
|
|||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
gsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
|
||||
gsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
|
||||
NULL);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
|
|
|
@ -735,7 +735,7 @@ slsr_process_phi (gimple phi, bool speed)
|
|||
derived_base_name = arg;
|
||||
|
||||
if (SSA_NAME_IS_DEFAULT_DEF (arg))
|
||||
arg_bb = single_succ (ENTRY_BLOCK_PTR);
|
||||
arg_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
else
|
||||
gimple_bb (SSA_NAME_DEF_STMT (arg));
|
||||
}
|
||||
|
|
|
@ -195,7 +195,7 @@ draw_cfg_nodes_for_loop (pretty_printer *pp, int funcdef_no,
|
|||
const char *fillcolors[3] = { "grey88", "grey77", "grey66" };
|
||||
|
||||
if (loop->header != NULL
|
||||
&& loop->latch != EXIT_BLOCK_PTR)
|
||||
&& loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
pp_printf (pp,
|
||||
"\tsubgraph cluster_%d_%d {\n"
|
||||
"\tstyle=\"filled\";\n"
|
||||
|
@ -214,7 +214,7 @@ draw_cfg_nodes_for_loop (pretty_printer *pp, int funcdef_no,
|
|||
if (loop->header == NULL)
|
||||
return;
|
||||
|
||||
if (loop->latch == EXIT_BLOCK_PTR)
|
||||
if (loop->latch == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
body = get_loop_body (loop);
|
||||
else
|
||||
body = get_loop_body_in_bfs_order (loop);
|
||||
|
@ -228,7 +228,7 @@ draw_cfg_nodes_for_loop (pretty_printer *pp, int funcdef_no,
|
|||
|
||||
free (body);
|
||||
|
||||
if (loop->latch != EXIT_BLOCK_PTR)
|
||||
if (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
pp_printf (pp, "\t}\n");
|
||||
}
|
||||
|
||||
|
|
|
@ -1098,7 +1098,7 @@ translate_clast_user (struct clast_user_stmt *stmt, edge next_e,
|
|||
gimple_bb_p gbb = PBB_BLACK_BOX (pbb);
|
||||
vec<tree> iv_map;
|
||||
|
||||
if (GBB_BB (gbb) == ENTRY_BLOCK_PTR)
|
||||
if (GBB_BB (gbb) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
return next_e;
|
||||
|
||||
nb_loops = number_of_loops (cfun);
|
||||
|
|
|
@ -448,7 +448,7 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
|
|||
gimple stmt;
|
||||
|
||||
/* XXX: ENTRY_BLOCK_PTR could be optimized in later steps. */
|
||||
basic_block entry_block = ENTRY_BLOCK_PTR;
|
||||
basic_block entry_block = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
stmt = harmful_stmt_in_bb (entry_block, outermost_loop, bb);
|
||||
result.difficult = (stmt != NULL);
|
||||
result.exit = NULL;
|
||||
|
@ -1030,7 +1030,7 @@ create_sese_edges (vec<sd_region> regions)
|
|||
FOR_EACH_VEC_ELT (regions, i, s)
|
||||
/* Don't handle multiple edges exiting the function. */
|
||||
if (!find_single_exit_edge (s)
|
||||
&& s->exit != EXIT_BLOCK_PTR)
|
||||
&& s->exit != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
create_single_exit_edge (s);
|
||||
|
||||
unmark_exit_edges (regions);
|
||||
|
@ -1402,7 +1402,8 @@ build_scops (vec<scop_p> *scops)
|
|||
stack_vec<sd_region, 3> regions;
|
||||
|
||||
canonicalize_loop_closed_ssa_form ();
|
||||
build_scops_1 (single_succ (ENTRY_BLOCK_PTR), ENTRY_BLOCK_PTR->loop_father,
|
||||
build_scops_1 (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father,
|
||||
®ions, loop);
|
||||
create_sese_edges (regions);
|
||||
build_graphite_scops (regions, scops);
|
||||
|
|
|
@ -1615,7 +1615,7 @@ priority (rtx insn)
|
|||
|
||||
/* Selective scheduling does not define RECOVERY_BLOCK macro. */
|
||||
rec = sel_sched_p () ? NULL : RECOVERY_BLOCK (insn);
|
||||
if (!rec || rec == EXIT_BLOCK_PTR)
|
||||
if (!rec || rec == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
prev_first = PREV_INSN (insn);
|
||||
twin = insn;
|
||||
|
@ -7522,7 +7522,7 @@ static void
|
|||
sched_extend_bb (void)
|
||||
{
|
||||
/* The following is done to keep current_sched_info->next_tail non null. */
|
||||
rtx end = BB_END (EXIT_BLOCK_PTR->prev_bb);
|
||||
rtx end = BB_END (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
|
||||
rtx insn = DEBUG_INSN_P (end) ? prev_nondebug_insn (end) : end;
|
||||
if (NEXT_INSN (end) == 0
|
||||
|| (!NOTE_P (insn)
|
||||
|
@ -7533,7 +7533,7 @@ sched_extend_bb (void)
|
|||
rtx note = emit_note_after (NOTE_INSN_DELETED, end);
|
||||
/* Make note appear outside BB. */
|
||||
set_block_for_insn (note, NULL);
|
||||
BB_END (EXIT_BLOCK_PTR->prev_bb) = end;
|
||||
BB_END (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb) = end;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -7551,7 +7551,7 @@ init_before_recovery (basic_block *before_recovery_ptr)
|
|||
basic_block last;
|
||||
edge e;
|
||||
|
||||
last = EXIT_BLOCK_PTR->prev_bb;
|
||||
last = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
|
||||
e = find_fallthru_edge_from (last);
|
||||
|
||||
if (e)
|
||||
|
@ -7591,7 +7591,8 @@ init_before_recovery (basic_block *before_recovery_ptr)
|
|||
|
||||
redirect_edge_succ (e, single);
|
||||
make_single_succ_edge (single, empty, 0);
|
||||
make_single_succ_edge (empty, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
|
||||
make_single_succ_edge (empty, EXIT_BLOCK_PTR_FOR_FN (cfun),
|
||||
EDGE_FALLTHRU);
|
||||
|
||||
label = block_label (empty);
|
||||
x = emit_jump_insn_after (gen_jump (label), BB_END (single));
|
||||
|
@ -7734,14 +7735,14 @@ create_check_block_twin (rtx insn, bool mutate_p)
|
|||
}
|
||||
else
|
||||
{
|
||||
rec = EXIT_BLOCK_PTR;
|
||||
rec = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
label = NULL_RTX;
|
||||
}
|
||||
|
||||
/* Emit CHECK. */
|
||||
check = targetm.sched.gen_spec_check (insn, label, todo_spec);
|
||||
|
||||
if (rec != EXIT_BLOCK_PTR)
|
||||
if (rec != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
/* To have mem_reg alive at the beginning of second_bb,
|
||||
we emit check BEFORE insn, so insn after splitting
|
||||
|
@ -7774,7 +7775,7 @@ create_check_block_twin (rtx insn, bool mutate_p)
|
|||
|
||||
/* Initialize TWIN (twin is a duplicate of original instruction
|
||||
in the recovery block). */
|
||||
if (rec != EXIT_BLOCK_PTR)
|
||||
if (rec != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
sd_iterator_def sd_it;
|
||||
dep_t dep;
|
||||
|
@ -7811,7 +7812,7 @@ create_check_block_twin (rtx insn, bool mutate_p)
|
|||
provide correct value for INSN_TICK (TWIN). */
|
||||
sd_copy_back_deps (twin, insn, true);
|
||||
|
||||
if (rec != EXIT_BLOCK_PTR)
|
||||
if (rec != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
/* In case of branchy check, fix CFG. */
|
||||
{
|
||||
basic_block first_bb, second_bb;
|
||||
|
@ -7823,7 +7824,7 @@ create_check_block_twin (rtx insn, bool mutate_p)
|
|||
sched_create_recovery_edges (first_bb, rec, second_bb);
|
||||
|
||||
sched_init_only_bb (second_bb, first_bb);
|
||||
sched_init_only_bb (rec, EXIT_BLOCK_PTR);
|
||||
sched_init_only_bb (rec, EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
jump = BB_END (rec);
|
||||
haifa_init_insn (jump);
|
||||
|
@ -7864,7 +7865,7 @@ create_check_block_twin (rtx insn, bool mutate_p)
|
|||
init_dep_1 (new_dep, pro, check, DEP_TYPE (dep), ds);
|
||||
sd_add_dep (new_dep, false);
|
||||
|
||||
if (rec != EXIT_BLOCK_PTR)
|
||||
if (rec != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
DEP_CON (new_dep) = twin;
|
||||
sd_add_dep (new_dep, false);
|
||||
|
@ -7913,7 +7914,7 @@ create_check_block_twin (rtx insn, bool mutate_p)
|
|||
/* Future speculations: call the helper. */
|
||||
process_insn_forw_deps_be_in_spec (insn, twin, fs);
|
||||
|
||||
if (rec != EXIT_BLOCK_PTR)
|
||||
if (rec != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
/* Which types of dependencies should we use here is,
|
||||
generally, machine-dependent question... But, for now,
|
||||
|
@ -8127,7 +8128,7 @@ unlink_bb_notes (basic_block first, basic_block last)
|
|||
bb_header = XNEWVEC (rtx, last_basic_block);
|
||||
|
||||
/* Make a sentinel. */
|
||||
if (last->next_bb != EXIT_BLOCK_PTR)
|
||||
if (last->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bb_header[last->next_bb->index] = 0;
|
||||
|
||||
first = first->next_bb;
|
||||
|
@ -8171,7 +8172,7 @@ restore_bb_notes (basic_block first)
|
|||
first = first->next_bb;
|
||||
/* Remember: FIRST is actually a second basic block in the ebb. */
|
||||
|
||||
while (first != EXIT_BLOCK_PTR
|
||||
while (first != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& bb_header[first->index])
|
||||
{
|
||||
rtx prev, label, note, next;
|
||||
|
|
|
@ -260,7 +260,7 @@ discover_loop (hwloop_info loop, basic_block tail_bb, rtx tail_insn, rtx reg)
|
|||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
if (bb == EXIT_BLOCK_PTR)
|
||||
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
/* We've reached the exit block. The loop must be bad. */
|
||||
if (dump_file)
|
||||
|
@ -539,7 +539,7 @@ reorder_loops (hwloop_info loops)
|
|||
|
||||
FOR_EACH_BB (bb)
|
||||
{
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bb->aux = bb->next_bb;
|
||||
else
|
||||
bb->aux = NULL;
|
||||
|
|
32
gcc/ifcvt.c
32
gcc/ifcvt.c
|
@ -3185,7 +3185,8 @@ merge_if_block (struct ce_if_block * ce_info)
|
|||
/* There should still be something at the end of the THEN or ELSE
|
||||
blocks taking us to our final destination. */
|
||||
gcc_assert (JUMP_P (last)
|
||||
|| (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
|
||||
|| (EDGE_SUCC (combo_bb, 0)->dest
|
||||
== EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& CALL_P (last)
|
||||
&& SIBLING_CALL_P (last))
|
||||
|| ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
|
||||
|
@ -3199,7 +3200,7 @@ merge_if_block (struct ce_if_block * ce_info)
|
|||
may be zero incoming edges if the THEN block didn't actually join
|
||||
back up (as with a call to a non-return function). */
|
||||
else if (EDGE_COUNT (join_bb->preds) < 2
|
||||
&& join_bb != EXIT_BLOCK_PTR)
|
||||
&& join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
/* We can merge the JOIN cleanly and update the dataflow try
|
||||
again on this pass.*/
|
||||
|
@ -3216,7 +3217,7 @@ merge_if_block (struct ce_if_block * ce_info)
|
|||
&& single_succ (combo_bb) == join_bb);
|
||||
|
||||
/* Remove the jump and cruft from the end of the COMBO block. */
|
||||
if (join_bb != EXIT_BLOCK_PTR)
|
||||
if (join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
tidy_fallthru_edge (single_succ_edge (combo_bb));
|
||||
}
|
||||
|
||||
|
@ -3495,7 +3496,7 @@ cond_exec_find_if_block (struct ce_if_block * ce_info)
|
|||
code processing. ??? we should fix this in the future. */
|
||||
if (EDGE_COUNT (then_bb->succs) == 0)
|
||||
{
|
||||
if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR)
|
||||
if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
rtx last_insn = BB_END (then_bb);
|
||||
|
||||
|
@ -3586,7 +3587,8 @@ cond_exec_find_if_block (struct ce_if_block * ce_info)
|
|||
next = then_bb;
|
||||
if (else_bb && (next = next->next_bb) != else_bb)
|
||||
return FALSE;
|
||||
if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
|
||||
if ((next = next->next_bb) != join_bb
|
||||
&& join_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
if (else_bb)
|
||||
join_bb = NULL;
|
||||
|
@ -3725,7 +3727,7 @@ block_has_only_trap (basic_block bb)
|
|||
rtx trap;
|
||||
|
||||
/* We're not the exit block. */
|
||||
if (bb == EXIT_BLOCK_PTR)
|
||||
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return NULL_RTX;
|
||||
|
||||
/* The block must have no successors. */
|
||||
|
@ -3881,7 +3883,7 @@ find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
|
|||
predictable_edge_p (then_edge)))))
|
||||
return FALSE;
|
||||
|
||||
if (else_bb == EXIT_BLOCK_PTR)
|
||||
if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
rtx jump = BB_END (else_edge->src);
|
||||
gcc_assert (JUMP_P (jump));
|
||||
|
@ -3902,12 +3904,12 @@ find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
|
|||
|
||||
if (then_bb->next_bb == else_bb
|
||||
&& then_bb->prev_bb == test_bb
|
||||
&& else_bb != EXIT_BLOCK_PTR)
|
||||
&& else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
|
||||
new_bb = 0;
|
||||
}
|
||||
else if (else_bb == EXIT_BLOCK_PTR)
|
||||
else if (else_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
new_bb = force_nonfallthru_and_redirect (FALLTHRU_EDGE (test_bb),
|
||||
else_bb, else_target);
|
||||
else
|
||||
|
@ -4196,9 +4198,9 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
|
|||
saved in caller-saved regs. A caller-saved reg requires the
|
||||
prologue, killing a shrink-wrap opportunity. */
|
||||
if ((flag_shrink_wrap && HAVE_simple_return && !epilogue_completed)
|
||||
&& ENTRY_BLOCK_PTR->next_bb == test_bb
|
||||
&& ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == test_bb
|
||||
&& single_succ_p (new_dest)
|
||||
&& single_succ (new_dest) == EXIT_BLOCK_PTR
|
||||
&& single_succ (new_dest) == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& bitmap_intersect_p (df_get_live_in (new_dest), merge_set))
|
||||
{
|
||||
regset return_regs;
|
||||
|
@ -4213,8 +4215,10 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
|
|||
&& targetm.calls.function_value_regno_p (i))
|
||||
bitmap_set_bit (return_regs, INCOMING_REGNO (i));
|
||||
|
||||
bitmap_and_into (return_regs, df_get_live_out (ENTRY_BLOCK_PTR));
|
||||
bitmap_and_into (return_regs, df_get_live_in (EXIT_BLOCK_PTR));
|
||||
bitmap_and_into (return_regs,
|
||||
df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
bitmap_and_into (return_regs,
|
||||
df_get_live_in (EXIT_BLOCK_PTR_FOR_FN (cfun)));
|
||||
if (!bitmap_empty_p (return_regs))
|
||||
{
|
||||
FOR_BB_INSNS_REVERSE (new_dest, insn)
|
||||
|
@ -4259,7 +4263,7 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
|
|||
{
|
||||
if (JUMP_P (BB_END (dest_edge->src)))
|
||||
new_dest_label = JUMP_LABEL (BB_END (dest_edge->src));
|
||||
else if (new_dest == EXIT_BLOCK_PTR)
|
||||
else if (new_dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
new_dest_label = ret_rtx;
|
||||
else
|
||||
new_dest_label = block_label (new_dest);
|
||||
|
|
|
@ -1841,9 +1841,9 @@ compute_bb_predicates (struct cgraph_node *node,
|
|||
}
|
||||
|
||||
/* Entry block is always executable. */
|
||||
ENTRY_BLOCK_PTR_FOR_FUNCTION (my_function)->aux
|
||||
ENTRY_BLOCK_PTR_FOR_FN (my_function)->aux
|
||||
= pool_alloc (edge_predicate_pool);
|
||||
*(struct predicate *) ENTRY_BLOCK_PTR_FOR_FUNCTION (my_function)->aux
|
||||
*(struct predicate *) ENTRY_BLOCK_PTR_FOR_FN (my_function)->aux
|
||||
= true_predicate ();
|
||||
|
||||
/* A simple dataflow propagation of predicates forward in the CFG.
|
||||
|
@ -2066,7 +2066,7 @@ record_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
|
|||
return false;
|
||||
bitmap_set_bit (info->bb_set,
|
||||
SSA_NAME_IS_DEFAULT_DEF (vdef)
|
||||
? ENTRY_BLOCK_PTR->index
|
||||
? ENTRY_BLOCK_PTR_FOR_FN (cfun)->index
|
||||
: gimple_bb (SSA_NAME_DEF_STMT (vdef))->index);
|
||||
return false;
|
||||
}
|
||||
|
@ -2102,7 +2102,7 @@ param_change_prob (gimple stmt, int i)
|
|||
return REG_BR_PROB_BASE;
|
||||
|
||||
if (SSA_NAME_IS_DEFAULT_DEF (op))
|
||||
init_freq = ENTRY_BLOCK_PTR->frequency;
|
||||
init_freq = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
|
||||
else
|
||||
init_freq = gimple_bb (SSA_NAME_DEF_STMT (op))->frequency;
|
||||
|
||||
|
@ -2142,8 +2142,8 @@ param_change_prob (gimple stmt, int i)
|
|||
/* Assume that every memory is initialized at entry.
|
||||
TODO: Can we easilly determine if value is always defined
|
||||
and thus we may skip entry block? */
|
||||
if (ENTRY_BLOCK_PTR->frequency)
|
||||
max = ENTRY_BLOCK_PTR->frequency;
|
||||
if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
|
||||
max = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
|
||||
else
|
||||
max = 1;
|
||||
|
||||
|
|
|
@ -1587,7 +1587,7 @@ local_pure_const (void)
|
|||
|
||||
/* Do NORETURN discovery. */
|
||||
if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
|
||||
&& EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
|
||||
&& EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 0)
|
||||
{
|
||||
warn_function_noreturn (cfun->decl);
|
||||
if (dump_file)
|
||||
|
@ -1723,7 +1723,7 @@ static unsigned int
|
|||
execute_warn_function_noreturn (void)
|
||||
{
|
||||
if (!TREE_THIS_VOLATILE (current_function_decl)
|
||||
&& EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
|
||||
&& EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 0)
|
||||
warn_function_noreturn (current_function_decl);
|
||||
return 0;
|
||||
}
|
||||
|
|
|
@ -210,7 +210,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
|
|||
bool ok = true;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, current->entry_bb->preds)
|
||||
if (e->src != ENTRY_BLOCK_PTR
|
||||
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& !bitmap_bit_p (current->split_bbs, e->src->index))
|
||||
{
|
||||
worklist.safe_push (e->src);
|
||||
|
@ -223,7 +223,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
|
|||
basic_block bb = worklist.pop ();
|
||||
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
if (e->src != ENTRY_BLOCK_PTR
|
||||
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& bitmap_set_bit (seen, e->src->index))
|
||||
{
|
||||
gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
|
||||
|
@ -396,7 +396,7 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
|
|||
|
||||
/* Do not split when we would end up calling function anyway. */
|
||||
if (incoming_freq
|
||||
>= (ENTRY_BLOCK_PTR->frequency
|
||||
>= (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
|
||||
* PARAM_VALUE (PARAM_PARTIAL_INLINING_ENTRY_PROBABILITY) / 100))
|
||||
{
|
||||
/* When profile is guessed, we can not expect it to give us
|
||||
|
@ -406,13 +406,13 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
|
|||
is likely noticeable win. */
|
||||
if (back_edge
|
||||
&& profile_status != PROFILE_READ
|
||||
&& incoming_freq < ENTRY_BLOCK_PTR->frequency)
|
||||
&& incoming_freq < ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
|
||||
{
|
||||
if (dump_file && (dump_flags & TDF_DETAILS))
|
||||
fprintf (dump_file,
|
||||
" Split before loop, accepting despite low frequencies %i %i.\n",
|
||||
incoming_freq,
|
||||
ENTRY_BLOCK_PTR->frequency);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
|
||||
}
|
||||
else
|
||||
{
|
||||
|
@ -583,7 +583,7 @@ consider_split (struct split_point *current, bitmap non_ssa_vars,
|
|||
|
||||
/* split_function fixes up at most one PHI non-virtual PHI node in return_bb,
|
||||
for the return value. If there are other PHIs, give up. */
|
||||
if (return_bb != EXIT_BLOCK_PTR)
|
||||
if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
gimple_stmt_iterator psi;
|
||||
|
||||
|
@ -650,15 +650,15 @@ static basic_block
|
|||
find_return_bb (void)
|
||||
{
|
||||
edge e;
|
||||
basic_block return_bb = EXIT_BLOCK_PTR;
|
||||
basic_block return_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
gimple_stmt_iterator bsi;
|
||||
bool found_return = false;
|
||||
tree retval = NULL_TREE;
|
||||
|
||||
if (!single_pred_p (EXIT_BLOCK_PTR))
|
||||
if (!single_pred_p (EXIT_BLOCK_PTR_FOR_FN (cfun)))
|
||||
return return_bb;
|
||||
|
||||
e = single_pred_edge (EXIT_BLOCK_PTR);
|
||||
e = single_pred_edge (EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
for (bsi = gsi_last_bb (e->src); !gsi_end_p (bsi); gsi_prev (&bsi))
|
||||
{
|
||||
gimple stmt = gsi_stmt (bsi);
|
||||
|
@ -937,7 +937,7 @@ find_split_points (int overall_time, int overall_size)
|
|||
current.split_size = 0;
|
||||
current.ssa_names_to_pass = BITMAP_ALLOC (NULL);
|
||||
|
||||
first.bb = ENTRY_BLOCK_PTR;
|
||||
first.bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
first.edge_num = 0;
|
||||
first.overall_time = 0;
|
||||
first.overall_size = 0;
|
||||
|
@ -946,7 +946,7 @@ find_split_points (int overall_time, int overall_size)
|
|||
first.used_ssa_names = 0;
|
||||
first.bbs_visited = 0;
|
||||
stack.safe_push (first);
|
||||
ENTRY_BLOCK_PTR->aux = (void *)(intptr_t)-1;
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(intptr_t)-1;
|
||||
|
||||
while (!stack.is_empty ())
|
||||
{
|
||||
|
@ -957,7 +957,7 @@ find_split_points (int overall_time, int overall_size)
|
|||
articulation, we want to have processed everything reachable
|
||||
from articulation but nothing that reaches into it. */
|
||||
if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
|
||||
&& entry->bb != ENTRY_BLOCK_PTR)
|
||||
&& entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
int pos = stack.length ();
|
||||
entry->can_split &= visit_bb (entry->bb, return_bb,
|
||||
|
@ -1009,7 +1009,7 @@ find_split_points (int overall_time, int overall_size)
|
|||
entry->edge_num++;
|
||||
|
||||
/* New BB to visit, push it to the stack. */
|
||||
if (dest != return_bb && dest != EXIT_BLOCK_PTR
|
||||
if (dest != return_bb && dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& !dest->aux)
|
||||
{
|
||||
stack_entry new_entry;
|
||||
|
@ -1037,7 +1037,7 @@ find_split_points (int overall_time, int overall_size)
|
|||
}
|
||||
/* We are done with examining the edges. Pop off the value from stack
|
||||
and merge stuff we accumulate during the walk. */
|
||||
else if (entry->bb != ENTRY_BLOCK_PTR)
|
||||
else if (entry->bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
stack_entry *prev = &stack[stack.length () - 2];
|
||||
|
||||
|
@ -1063,7 +1063,7 @@ find_split_points (int overall_time, int overall_size)
|
|||
else
|
||||
stack.pop ();
|
||||
}
|
||||
ENTRY_BLOCK_PTR->aux = NULL;
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = NULL;
|
||||
FOR_EACH_BB (bb)
|
||||
bb->aux = NULL;
|
||||
stack.release ();
|
||||
|
@ -1139,7 +1139,7 @@ split_function (struct split_point *split_point)
|
|||
if (!split_part_return_p)
|
||||
;
|
||||
/* We have no return block, so nothing is needed. */
|
||||
else if (return_bb == EXIT_BLOCK_PTR)
|
||||
else if (return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
;
|
||||
/* When we do not want to return value, we need to construct
|
||||
new return block with empty return statement.
|
||||
|
@ -1166,7 +1166,7 @@ split_function (struct split_point *split_point)
|
|||
break;
|
||||
}
|
||||
}
|
||||
e = make_edge (new_return_bb, EXIT_BLOCK_PTR, 0);
|
||||
e = make_edge (new_return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
|
||||
e->probability = REG_BR_PROB_BASE;
|
||||
e->count = new_return_bb->count;
|
||||
if (current_loops)
|
||||
|
@ -1183,7 +1183,7 @@ split_function (struct split_point *split_point)
|
|||
|
||||
Note this can happen whether or not we have a return value. If we have
|
||||
a return value, then RETURN_BB may have PHIs for real operands too. */
|
||||
if (return_bb != EXIT_BLOCK_PTR)
|
||||
if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
bool phi_p = false;
|
||||
for (gsi = gsi_start_phis (return_bb); !gsi_end_p (gsi);)
|
||||
|
@ -1325,7 +1325,7 @@ split_function (struct split_point *split_point)
|
|||
push_cfun (DECL_STRUCT_FUNCTION (node->decl));
|
||||
var = BLOCK_VARS (DECL_INITIAL (node->decl));
|
||||
i = vec_safe_length (*debug_args);
|
||||
cgsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
|
||||
cgsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
do
|
||||
{
|
||||
i -= 2;
|
||||
|
@ -1366,13 +1366,14 @@ split_function (struct split_point *split_point)
|
|||
else
|
||||
{
|
||||
e = make_edge (call_bb, return_bb,
|
||||
return_bb == EXIT_BLOCK_PTR ? 0 : EDGE_FALLTHRU);
|
||||
return_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
? 0 : EDGE_FALLTHRU);
|
||||
e->count = call_bb->count;
|
||||
e->probability = REG_BR_PROB_BASE;
|
||||
|
||||
/* If there is return basic block, see what value we need to store
|
||||
return value into and put call just before it. */
|
||||
if (return_bb != EXIT_BLOCK_PTR)
|
||||
if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
real_retval = retval = find_retval (return_bb);
|
||||
|
||||
|
|
|
@ -1745,7 +1745,7 @@ ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSE
|
|||
ira_loop_tree_node_t pred_node;
|
||||
basic_block pred_bb = e->src;
|
||||
|
||||
if (e->src == ENTRY_BLOCK_PTR)
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
pred_node = IRA_BB_NODE_BY_INDEX (pred_bb->index);
|
||||
|
|
|
@ -3100,7 +3100,7 @@ print_loop_title (ira_loop_tree_node_t loop_tree_node)
|
|||
{
|
||||
fprintf (ira_dump_file, " %d", subloop_node->bb->index);
|
||||
FOR_EACH_EDGE (e, ei, subloop_node->bb->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& ((dest_loop_node = IRA_BB_NODE (e->dest)->parent)
|
||||
!= loop_tree_node))
|
||||
fprintf (ira_dump_file, "(->%d:l%d)",
|
||||
|
|
|
@ -403,7 +403,7 @@ entered_from_non_parent_p (ira_loop_tree_node_t loop_node)
|
|||
if (bb_node->bb != NULL)
|
||||
{
|
||||
FOR_EACH_EDGE (e, ei, bb_node->bb->preds)
|
||||
if (e->src != ENTRY_BLOCK_PTR
|
||||
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& (src_loop_node = IRA_BB_NODE (e->src)->parent) != loop_node)
|
||||
{
|
||||
for (parent = src_loop_node->parent;
|
||||
|
@ -1263,7 +1263,7 @@ ira_emit (bool loops_p)
|
|||
at_bb_start[bb->index] = NULL;
|
||||
at_bb_end[bb->index] = NULL;
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR)
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
generate_edge_moves (e);
|
||||
}
|
||||
allocno_last_set
|
||||
|
|
|
@ -43,8 +43,9 @@ along with GCC; see the file COPYING3. If not see
|
|||
executed, frequency is always equivalent. Otherwise rescale the
|
||||
edge frequency. */
|
||||
#define REG_FREQ_FROM_EDGE_FREQ(freq) \
|
||||
(optimize_size || (flag_branch_probabilities && !ENTRY_BLOCK_PTR->count) \
|
||||
? REG_FREQ_MAX : (freq * REG_FREQ_MAX / BB_FREQ_MAX) \
|
||||
(optimize_size || (flag_branch_probabilities \
|
||||
&& !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count) \
|
||||
? REG_FREQ_MAX : (freq * REG_FREQ_MAX / BB_FREQ_MAX) \
|
||||
? (freq * REG_FREQ_MAX / BB_FREQ_MAX) : 1)
|
||||
|
||||
/* A modified value of flag `-fira-verbose' used internally. */
|
||||
|
|
|
@ -4865,7 +4865,7 @@ static bool
|
|||
split_live_ranges_for_shrink_wrap (void)
|
||||
{
|
||||
basic_block bb, call_dom = NULL;
|
||||
basic_block first = single_succ (ENTRY_BLOCK_PTR);
|
||||
basic_block first = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
rtx insn, last_interesting_insn = NULL;
|
||||
bitmap_head need_new, reachable;
|
||||
vec<basic_block> queue;
|
||||
|
@ -4910,7 +4910,7 @@ split_live_ranges_for_shrink_wrap (void)
|
|||
|
||||
bb = queue.pop ();
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& bitmap_set_bit (&reachable, e->dest->index))
|
||||
queue.quick_push (e->dest);
|
||||
}
|
||||
|
|
40
gcc/lcm.c
40
gcc/lcm.c
|
@ -121,8 +121,8 @@ compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
|
|||
|
||||
/* Mark blocks which are predecessors of the exit block so that we
|
||||
can easily identify them below. */
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
e->src->aux = EXIT_BLOCK_PTR;
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
e->src->aux = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
/* Iterate until the worklist is empty. */
|
||||
while (qlen)
|
||||
|
@ -134,7 +134,7 @@ compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
|
|||
if (qout >= qend)
|
||||
qout = worklist;
|
||||
|
||||
if (bb->aux == EXIT_BLOCK_PTR)
|
||||
if (bb->aux == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
/* Do not clear the aux field for blocks which are predecessors of
|
||||
the EXIT block. That way we never add then to the worklist
|
||||
again. */
|
||||
|
@ -153,7 +153,7 @@ compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
|
|||
to add the predecessors of this block to the worklist
|
||||
if they are not already on the worklist. */
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
|
||||
if (!e->src->aux && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
*qin++ = e->src;
|
||||
e->src->aux = e;
|
||||
|
@ -188,11 +188,11 @@ compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
|
|||
{
|
||||
pred = INDEX_EDGE_PRED_BB (edge_list, x);
|
||||
succ = INDEX_EDGE_SUCC_BB (edge_list, x);
|
||||
if (pred == ENTRY_BLOCK_PTR)
|
||||
if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_copy (earliest[x], antin[succ->index]);
|
||||
else
|
||||
{
|
||||
if (succ == EXIT_BLOCK_PTR)
|
||||
if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_clear (earliest[x]);
|
||||
else
|
||||
{
|
||||
|
@ -276,7 +276,7 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
|
|||
do not want to be overly optimistic. Consider an outgoing edge from
|
||||
the entry block. That edge should always have a LATER value the
|
||||
same as EARLIEST for that edge. */
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
bitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);
|
||||
|
||||
/* Add all the blocks to the worklist. This prevents an early exit from
|
||||
|
@ -317,7 +317,7 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
|
|||
antloc[e->src->index])
|
||||
/* If LATER for an outgoing edge was changed, then we need
|
||||
to add the target of the outgoing edge to the worklist. */
|
||||
&& e->dest != EXIT_BLOCK_PTR && e->dest->aux == 0)
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest->aux == 0)
|
||||
{
|
||||
*qin++ = e->dest;
|
||||
e->dest->aux = e;
|
||||
|
@ -331,7 +331,7 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
|
|||
for the EXIT block. We allocated an extra entry in the LATERIN array
|
||||
for just this purpose. */
|
||||
bitmap_ones (laterin[last_basic_block]);
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
bitmap_and (laterin[last_basic_block],
|
||||
laterin[last_basic_block],
|
||||
later[(size_t) e->aux]);
|
||||
|
@ -358,7 +358,7 @@ compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
|
|||
{
|
||||
basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
|
||||
|
||||
if (b == EXIT_BLOCK_PTR)
|
||||
if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_and_compl (insert[x], later[x], laterin[last_basic_block]);
|
||||
else
|
||||
bitmap_and_compl (insert[x], later[x], laterin[b->index]);
|
||||
|
@ -500,8 +500,8 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
|
|||
|
||||
/* Mark blocks which are successors of the entry block so that we
|
||||
can easily identify them below. */
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
|
||||
e->dest->aux = ENTRY_BLOCK_PTR;
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
/* Iterate until the worklist is empty. */
|
||||
while (qlen)
|
||||
|
@ -516,7 +516,7 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
|
|||
/* If one of the predecessor blocks is the ENTRY block, then the
|
||||
intersection of avouts is the null set. We can identify such blocks
|
||||
by the special value in the AUX field in the block structure. */
|
||||
if (bb->aux == ENTRY_BLOCK_PTR)
|
||||
if (bb->aux == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
/* Do not clear the aux field for blocks which are successors of the
|
||||
ENTRY block. That way we never add then to the worklist again. */
|
||||
bitmap_clear (avin[bb->index]);
|
||||
|
@ -534,7 +534,7 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
|
|||
to add the successors of this block to the worklist
|
||||
if they are not already on the worklist. */
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
|
||||
if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
*qin++ = e->dest;
|
||||
e->dest->aux = e;
|
||||
|
@ -570,11 +570,11 @@ compute_farthest (struct edge_list *edge_list, int n_exprs,
|
|||
{
|
||||
pred = INDEX_EDGE_PRED_BB (edge_list, x);
|
||||
succ = INDEX_EDGE_SUCC_BB (edge_list, x);
|
||||
if (succ == EXIT_BLOCK_PTR)
|
||||
if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_copy (farthest[x], st_avout[pred->index]);
|
||||
else
|
||||
{
|
||||
if (pred == ENTRY_BLOCK_PTR)
|
||||
if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_clear (farthest[x]);
|
||||
else
|
||||
{
|
||||
|
@ -624,7 +624,7 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
|
|||
do not want to be overly optimistic. Consider an incoming edge to
|
||||
the exit block. That edge should always have a NEARER value the
|
||||
same as FARTHEST for that edge. */
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
bitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);
|
||||
|
||||
/* Add all the blocks to the worklist. This prevents an early exit
|
||||
|
@ -656,7 +656,7 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
|
|||
st_avloc[e->dest->index])
|
||||
/* If NEARER for an incoming edge was changed, then we need
|
||||
to add the source of the incoming edge to the worklist. */
|
||||
&& e->src != ENTRY_BLOCK_PTR && e->src->aux == 0)
|
||||
&& e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && e->src->aux == 0)
|
||||
{
|
||||
*tos++ = e->src;
|
||||
e->src->aux = e;
|
||||
|
@ -667,7 +667,7 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
|
|||
for the ENTRY block. We allocated an extra entry in the NEAREROUT array
|
||||
for just this purpose. */
|
||||
bitmap_ones (nearerout[last_basic_block]);
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
bitmap_and (nearerout[last_basic_block],
|
||||
nearerout[last_basic_block],
|
||||
nearer[(size_t) e->aux]);
|
||||
|
@ -693,7 +693,7 @@ compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
|
|||
for (x = 0; x < NUM_EDGES (edge_list); x++)
|
||||
{
|
||||
basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
|
||||
if (b == ENTRY_BLOCK_PTR)
|
||||
if (b == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_and_compl (insert[x], nearer[x], nearerout[last_basic_block]);
|
||||
else
|
||||
bitmap_and_compl (insert[x], nearer[x], nearerout[b->index]);
|
||||
|
|
|
@ -1937,7 +1937,7 @@ simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
|
|||
return;
|
||||
|
||||
e = loop_preheader_edge (loop);
|
||||
if (e->src == ENTRY_BLOCK_PTR)
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
return;
|
||||
|
||||
altered = ALLOC_REG_SET (®_obstack);
|
||||
|
@ -2068,7 +2068,7 @@ simplify_using_initial_values (struct loop *loop, enum rtx_code op, rtx *expr)
|
|||
}
|
||||
|
||||
if (!single_pred_p (e->src)
|
||||
|| single_pred (e->src) == ENTRY_BLOCK_PTR)
|
||||
|| single_pred (e->src) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
break;
|
||||
e = single_pred_edge (e->src);
|
||||
}
|
||||
|
|
|
@ -433,7 +433,7 @@ unswitch_loop (struct loop *loop, basic_block unswitch_on, rtx cond, rtx cinsn)
|
|||
|
||||
/* Create a block with the condition. */
|
||||
prob = true_edge->probability;
|
||||
switch_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
|
||||
switch_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
|
||||
seq = compare_and_jump_seq (XEXP (cond, 0), XEXP (cond, 1), GET_CODE (cond),
|
||||
block_label (true_edge->dest),
|
||||
prob, cinsn);
|
||||
|
|
|
@ -612,7 +612,7 @@ find_hard_regno_for (int regno, int *cost, int try_only_hard_regno)
|
|||
&& ! df_regs_ever_live_p (hard_regno + j))
|
||||
/* It needs save restore. */
|
||||
hard_regno_costs[hard_regno]
|
||||
+= 2 * ENTRY_BLOCK_PTR->next_bb->frequency + 1;
|
||||
+= 2 * ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->frequency + 1;
|
||||
priority = targetm.register_priority (hard_regno);
|
||||
if (best_hard_regno < 0 || hard_regno_costs[hard_regno] < best_cost
|
||||
|| (hard_regno_costs[hard_regno] == best_cost
|
||||
|
|
|
@ -5295,7 +5295,8 @@ lra_inheritance (void)
|
|||
{
|
||||
if (lra_dump_file != NULL)
|
||||
fprintf (lra_dump_file, " %d", bb->index);
|
||||
if (bb->next_bb == EXIT_BLOCK_PTR || LABEL_P (BB_HEAD (bb->next_bb)))
|
||||
if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| LABEL_P (BB_HEAD (bb->next_bb)))
|
||||
break;
|
||||
e = find_fallthru_edge (bb->succs);
|
||||
if (! e)
|
||||
|
|
|
@ -1002,7 +1002,8 @@ lra_create_live_ranges (bool all_p)
|
|||
for (i = n_blocks_inverted - 1; i >= 0; --i)
|
||||
{
|
||||
bb = BASIC_BLOCK (post_order_rev_cfg[i]);
|
||||
if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR)
|
||||
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb
|
||||
== ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
process_bb_lives (bb, curr_point);
|
||||
}
|
||||
|
|
|
@ -2065,8 +2065,8 @@ has_nonexceptional_receiver (void)
|
|||
bb->flags &= ~BB_REACHABLE;
|
||||
|
||||
/* Place the exit block on our worklist. */
|
||||
EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
|
||||
*tos++ = EXIT_BLOCK_PTR;
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
|
||||
*tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
/* Iterate: find everything reachable from what we've already seen. */
|
||||
while (tos != worklist)
|
||||
|
|
|
@ -659,7 +659,7 @@ input_cfg (struct lto_input_block *ib, struct function *fn,
|
|||
index = streamer_read_hwi (ib);
|
||||
}
|
||||
|
||||
p_bb = ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
|
||||
p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
|
||||
index = streamer_read_hwi (ib);
|
||||
while (index != -1)
|
||||
{
|
||||
|
@ -996,7 +996,7 @@ input_function (tree fn_decl, struct data_in *data_in,
|
|||
of a gimple body is used by the cgraph routines, but we should
|
||||
really use the presence of the CFG. */
|
||||
{
|
||||
edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
|
||||
edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
|
||||
gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
|
||||
}
|
||||
|
||||
|
|
|
@ -1594,7 +1594,7 @@ output_cfg (struct output_block *ob, struct function *fn)
|
|||
|
||||
streamer_write_hwi (ob, -1);
|
||||
|
||||
bb = ENTRY_BLOCK_PTR;
|
||||
bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
while (bb->next_bb)
|
||||
{
|
||||
streamer_write_hwi (ob, bb->next_bb->index);
|
||||
|
|
13
gcc/mcf.c
13
gcc/mcf.c
|
@ -508,7 +508,7 @@ create_fixup_graph (fixup_graph_type *fixup_graph)
|
|||
|
||||
/* Compute constants b, k_pos, k_neg used in the cost function calculation.
|
||||
b = sqrt(avg_vertex_weight(cfg)); k_pos = b; k_neg = 50b. */
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
total_vertex_weight += bb->count;
|
||||
|
||||
sqrt_avg_vertex_weight = mcf_sqrt (total_vertex_weight /
|
||||
|
@ -523,7 +523,7 @@ create_fixup_graph (fixup_graph_type *fixup_graph)
|
|||
if (dump_file)
|
||||
fprintf (dump_file, "\nVertex transformation:\n");
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
{
|
||||
/* v'->v'': index1->(index1+1). */
|
||||
i = 2 * bb->index;
|
||||
|
@ -1125,7 +1125,8 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
|
|||
if (dump_file)
|
||||
fprintf (dump_file, "\nadjust_cfg_counts():\n");
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
i = 2 * bb->index;
|
||||
|
||||
|
@ -1238,8 +1239,10 @@ adjust_cfg_counts (fixup_graph_type *fixup_graph)
|
|||
}
|
||||
}
|
||||
|
||||
ENTRY_BLOCK_PTR->count = sum_edge_counts (ENTRY_BLOCK_PTR->succs);
|
||||
EXIT_BLOCK_PTR->count = sum_edge_counts (EXIT_BLOCK_PTR->preds);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
|
||||
sum_edge_counts (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
|
||||
sum_edge_counts (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
|
||||
|
||||
/* Compute edge probabilities. */
|
||||
FOR_ALL_BB (bb)
|
||||
|
|
|
@ -211,7 +211,7 @@ create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
|
|||
fallthrough edge; there can be at most one, but there could be
|
||||
none at all, e.g. when exit is called. */
|
||||
pre_exit = 0;
|
||||
FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (eg, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
if (eg->flags & EDGE_FALLTHRU)
|
||||
{
|
||||
basic_block src_bb = eg->src;
|
||||
|
@ -221,7 +221,7 @@ create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
|
|||
/* If this function returns a value at the end, we have to
|
||||
insert the final mode switch before the return value copy
|
||||
to its hard register. */
|
||||
if (EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 1
|
||||
if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) == 1
|
||||
&& NONJUMP_INSN_P ((last_insn = BB_END (src_bb)))
|
||||
&& GET_CODE (PATTERN (last_insn)) == USE
|
||||
&& GET_CODE ((ret_reg = XEXP (PATTERN (last_insn), 0))) == REG)
|
||||
|
@ -492,7 +492,7 @@ optimize_mode_switching (void)
|
|||
#if defined (MODE_ENTRY) && defined (MODE_EXIT)
|
||||
/* Split the edge from the entry block, so that we can note that
|
||||
there NORMAL_MODE is supplied. */
|
||||
post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
|
||||
post_entry = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
pre_exit = create_pre_exit (n_entities, entity_map, num_modes);
|
||||
#endif
|
||||
|
||||
|
|
|
@ -1308,7 +1308,7 @@ canon_loop (struct loop *loop)
|
|||
|
||||
/* Avoid annoying special cases of edges going to exit
|
||||
block. */
|
||||
FOR_EACH_EDGE (e, i, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, i, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
if ((e->flags & EDGE_FALLTHRU) && (EDGE_COUNT (e->src->succs) > 1))
|
||||
split_edge (e);
|
||||
|
||||
|
@ -3344,7 +3344,7 @@ rest_of_handle_sms (void)
|
|||
|
||||
/* Finalize layout changes. */
|
||||
FOR_EACH_BB (bb)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR)
|
||||
if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bb->aux = bb->next_bb;
|
||||
free_dominance_info (CDI_DOMINATORS);
|
||||
cfg_layout_finalize ();
|
||||
|
|
|
@ -8235,7 +8235,7 @@ build_omp_regions (void)
|
|||
{
|
||||
gcc_assert (root_omp_region == NULL);
|
||||
calculate_dominance_info (CDI_DOMINATORS);
|
||||
build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL, false);
|
||||
build_omp_regions_1 (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, false);
|
||||
}
|
||||
|
||||
/* Main entry point for expanding OMP-GIMPLE into runtime calls. */
|
||||
|
|
|
@ -1158,12 +1158,12 @@ eliminate_partially_redundant_loads (void)
|
|||
|
||||
/* Note we start at block 1. */
|
||||
|
||||
if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
|
||||
if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return;
|
||||
|
||||
FOR_BB_BETWEEN (bb,
|
||||
ENTRY_BLOCK_PTR->next_bb->next_bb,
|
||||
EXIT_BLOCK_PTR,
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun),
|
||||
next_bb)
|
||||
{
|
||||
/* Don't try anything on basic blocks with strange predecessors. */
|
||||
|
|
|
@ -129,11 +129,11 @@ maybe_hot_frequency_p (struct function *fun, int freq)
|
|||
if (profile_status_for_function (fun) == PROFILE_ABSENT)
|
||||
return true;
|
||||
if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
|
||||
&& freq < (ENTRY_BLOCK_PTR_FOR_FUNCTION (fun)->frequency * 2 / 3))
|
||||
&& freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency * 2 / 3))
|
||||
return false;
|
||||
if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0)
|
||||
return false;
|
||||
if (freq < (ENTRY_BLOCK_PTR_FOR_FUNCTION (fun)->frequency
|
||||
if (freq < (ENTRY_BLOCK_PTR_FOR_FN (fun)->frequency
|
||||
/ PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION)))
|
||||
return false;
|
||||
return true;
|
||||
|
@ -251,24 +251,27 @@ probably_never_executed (struct function *fun,
|
|||
return false;
|
||||
if (!frequency)
|
||||
return true;
|
||||
if (!ENTRY_BLOCK_PTR->frequency)
|
||||
if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency)
|
||||
return false;
|
||||
if (ENTRY_BLOCK_PTR->count)
|
||||
if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
|
||||
{
|
||||
gcov_type computed_count;
|
||||
/* Check for possibility of overflow, in which case entry bb count
|
||||
is large enough to do the division first without losing much
|
||||
precision. */
|
||||
if (ENTRY_BLOCK_PTR->count < REG_BR_PROB_BASE * REG_BR_PROB_BASE)
|
||||
if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count < REG_BR_PROB_BASE *
|
||||
REG_BR_PROB_BASE)
|
||||
{
|
||||
gcov_type scaled_count
|
||||
= frequency * ENTRY_BLOCK_PTR->count * unlikely_count_fraction;
|
||||
computed_count = RDIV (scaled_count, ENTRY_BLOCK_PTR->frequency);
|
||||
= frequency * ENTRY_BLOCK_PTR_FOR_FN (cfun)->count *
|
||||
unlikely_count_fraction;
|
||||
computed_count = RDIV (scaled_count,
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
|
||||
}
|
||||
else
|
||||
{
|
||||
computed_count = RDIV (ENTRY_BLOCK_PTR->count,
|
||||
ENTRY_BLOCK_PTR->frequency);
|
||||
computed_count = RDIV (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count,
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency);
|
||||
computed_count *= frequency * unlikely_count_fraction;
|
||||
}
|
||||
if (computed_count >= profile_info->runs)
|
||||
|
@ -613,7 +616,8 @@ void
|
|||
gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
|
||||
{
|
||||
gcc_assert (profile_status != PROFILE_GUESSED);
|
||||
if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
|
||||
if ((e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && EDGE_COUNT (e->src->succs) >
|
||||
1)
|
||||
&& flag_guess_branch_prob && optimize)
|
||||
{
|
||||
struct edge_prediction *i = XNEW (struct edge_prediction);
|
||||
|
@ -2170,7 +2174,7 @@ apply_return_prediction (void)
|
|||
enum prediction direction;
|
||||
edge_iterator ei;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
{
|
||||
return_stmt = last_stmt (e->src);
|
||||
if (return_stmt
|
||||
|
@ -2218,7 +2222,7 @@ tree_bb_level_predictions (void)
|
|||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
if (!(e->flags & (EDGE_ABNORMAL | EDGE_FAKE | EDGE_EH)))
|
||||
{
|
||||
has_return_edges = true;
|
||||
|
@ -2286,7 +2290,7 @@ tree_estimate_probability_bb (basic_block bb)
|
|||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
{
|
||||
/* Predict edges to user labels with attributes. */
|
||||
if (e->dest != EXIT_BLOCK_PTR)
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
gimple_stmt_iterator gi;
|
||||
for (gi = gsi_start_bb (e->dest); !gsi_end_p (gi); gsi_next (&gi))
|
||||
|
@ -2324,9 +2328,9 @@ tree_estimate_probability_bb (basic_block bb)
|
|||
return_block:
|
||||
return_stmt. */
|
||||
if (e->dest != bb->next_bb
|
||||
&& e->dest != EXIT_BLOCK_PTR
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& single_succ_p (e->dest)
|
||||
&& single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
|
||||
&& single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& (last = last_stmt (e->dest)) != NULL
|
||||
&& gimple_code (last) == GIMPLE_RETURN)
|
||||
{
|
||||
|
@ -2350,7 +2354,7 @@ tree_estimate_probability_bb (basic_block bb)
|
|||
|
||||
/* Look for block we are guarding (ie we dominate it,
|
||||
but it doesn't postdominate us). */
|
||||
if (e->dest != EXIT_BLOCK_PTR && e->dest != bb
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb
|
||||
&& dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
|
||||
&& !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
|
||||
{
|
||||
|
@ -2612,7 +2616,7 @@ propagate_freq (basic_block head, bitmap tovisit)
|
|||
}
|
||||
BLOCK_INFO (bb)->npredecessors = count;
|
||||
/* When function never returns, we will never process exit block. */
|
||||
if (!count && bb == EXIT_BLOCK_PTR)
|
||||
if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bb->count = bb->frequency = 0;
|
||||
}
|
||||
|
||||
|
@ -2762,7 +2766,7 @@ estimate_loops (void)
|
|||
{
|
||||
bitmap_set_bit (tovisit, bb->index);
|
||||
}
|
||||
propagate_freq (ENTRY_BLOCK_PTR, tovisit);
|
||||
propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit);
|
||||
BITMAP_FREE (tovisit);
|
||||
}
|
||||
|
||||
|
@ -2892,14 +2896,14 @@ counts_to_freqs (void)
|
|||
/* Don't overwrite the estimated frequencies when the profile for
|
||||
the function is missing. We may drop this function PROFILE_GUESSED
|
||||
later in drop_profile (). */
|
||||
if (!ENTRY_BLOCK_PTR->count)
|
||||
if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count)
|
||||
return 0;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
true_count_max = MAX (bb->count, true_count_max);
|
||||
|
||||
count_max = MAX (true_count_max, 1);
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
bb->frequency = (bb->count * BB_FREQ_MAX + count_max / 2) / count_max;
|
||||
|
||||
return true_count_max;
|
||||
|
@ -2924,11 +2928,11 @@ expensive_function_p (int threshold)
|
|||
/* Frequencies are out of range. This either means that function contains
|
||||
internal loop executing more than BB_FREQ_MAX times or profile feedback
|
||||
is available and function has not been executed at all. */
|
||||
if (ENTRY_BLOCK_PTR->frequency == 0)
|
||||
if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency == 0)
|
||||
return true;
|
||||
|
||||
/* Maximally BB_FREQ_MAX^2 so overflow won't happen. */
|
||||
limit = ENTRY_BLOCK_PTR->frequency * threshold;
|
||||
limit = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency * threshold;
|
||||
FOR_EACH_BB (bb)
|
||||
{
|
||||
rtx insn;
|
||||
|
@ -2973,12 +2977,13 @@ estimate_bb_frequencies (bool force)
|
|||
|
||||
mark_dfs_back_edges ();
|
||||
|
||||
single_succ_edge (ENTRY_BLOCK_PTR)->probability = REG_BR_PROB_BASE;
|
||||
single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
|
||||
REG_BR_PROB_BASE;
|
||||
|
||||
/* Set up block info for each basic block. */
|
||||
alloc_aux_for_blocks (sizeof (struct block_info_def));
|
||||
alloc_aux_for_edges (sizeof (struct edge_info_def));
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -3002,7 +3007,7 @@ estimate_bb_frequencies (bool force)
|
|||
memcpy (&freq_max, &BLOCK_INFO (bb)->frequency, sizeof (freq_max));
|
||||
|
||||
sreal_div (&freq_max, &real_bb_freq_max, &freq_max);
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
{
|
||||
sreal tmp;
|
||||
|
||||
|
@ -3186,7 +3191,7 @@ rebuild_frequencies (void)
|
|||
max counts. */
|
||||
gcov_type count_max = 0;
|
||||
basic_block bb;
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
count_max = MAX (bb->count, count_max);
|
||||
|
||||
if (profile_status == PROFILE_GUESSED
|
||||
|
|
|
@ -117,7 +117,7 @@ instrument_edges (struct edge_list *el)
|
|||
int num_edges = NUM_EDGES (el);
|
||||
basic_block bb;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -192,7 +192,8 @@ instrument_values (histogram_values values)
|
|||
|
||||
case HIST_TYPE_TIME_PROFILE:
|
||||
{
|
||||
basic_block bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
|
||||
basic_block bb =
|
||||
split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
gimple_stmt_iterator gsi = gsi_start_bb (bb);
|
||||
|
||||
gimple_gen_time_profiler (t, 0, gsi);
|
||||
|
@ -272,7 +273,7 @@ get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
|
|||
gcov_type *counts;
|
||||
|
||||
/* Count the edges to be (possibly) instrumented. */
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -332,7 +333,7 @@ correct_negative_edge_counts (void)
|
|||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
{
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
{
|
||||
|
@ -383,7 +384,8 @@ is_inconsistent (void)
|
|||
inconsistent = true;
|
||||
}
|
||||
if (bb->count != sum_edge_counts (bb->succs) &&
|
||||
! (find_edge (bb, EXIT_BLOCK_PTR) != NULL && block_ends_with_call_p (bb)))
|
||||
! (find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun)) != NULL
|
||||
&& block_ends_with_call_p (bb)))
|
||||
{
|
||||
if (dump_file)
|
||||
{
|
||||
|
@ -408,7 +410,7 @@ static void
|
|||
set_bb_counts (void)
|
||||
{
|
||||
basic_block bb;
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
{
|
||||
bb->count = sum_edge_counts (bb->succs);
|
||||
gcc_assert (bb->count >= 0);
|
||||
|
@ -427,7 +429,7 @@ read_profile_edge_counts (gcov_type *exec_counts)
|
|||
/* The first count in the .da file is the number of times that the function
|
||||
was entered. This is the exec_count for block zero. */
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -491,7 +493,7 @@ compute_frequency_overlap (void)
|
|||
int overlap = 0;
|
||||
basic_block bb;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
{
|
||||
count_total += bb->count;
|
||||
freq_total += bb->frequency;
|
||||
|
@ -500,7 +502,7 @@ compute_frequency_overlap (void)
|
|||
if (count_total == 0 || freq_total == 0)
|
||||
return 0;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
overlap += MIN (bb->count * OVERLAP_BASE / count_total,
|
||||
bb->frequency * OVERLAP_BASE / freq_total);
|
||||
|
||||
|
@ -537,7 +539,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
|
|||
|
||||
/* Attach extra info block to each bb. */
|
||||
alloc_aux_for_blocks (sizeof (struct bb_info));
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -551,8 +553,8 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
|
|||
}
|
||||
|
||||
/* Avoid predicting entry on exit nodes. */
|
||||
BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
|
||||
BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
|
||||
BB_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->succ_count = 2;
|
||||
BB_INFO (ENTRY_BLOCK_PTR_FOR_FN (cfun))->pred_count = 2;
|
||||
|
||||
num_edges = read_profile_edge_counts (exec_counts);
|
||||
|
||||
|
@ -582,7 +584,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
|
|||
{
|
||||
passes++;
|
||||
changes = 0;
|
||||
FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
|
||||
FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, prev_bb)
|
||||
{
|
||||
struct bb_info *bi = BB_INFO (bb);
|
||||
if (! bi->count_valid)
|
||||
|
@ -724,7 +726,7 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
|
|||
hist_br_prob[i] = 0;
|
||||
num_branches = 0;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -743,9 +745,9 @@ compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum)
|
|||
already present. We get negative frequency from the entry
|
||||
point. */
|
||||
if ((e->count < 0
|
||||
&& e->dest == EXIT_BLOCK_PTR)
|
||||
&& e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
|| (e->count > bb->count
|
||||
&& e->dest != EXIT_BLOCK_PTR))
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)))
|
||||
{
|
||||
if (block_ends_with_call_p (bb))
|
||||
e->count = e->count < 0 ? 0 : bb->count;
|
||||
|
@ -1064,17 +1066,17 @@ branch_prob (void)
|
|||
ne->goto_locus = e->goto_locus;
|
||||
}
|
||||
if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
|
||||
&& e->dest != EXIT_BLOCK_PTR)
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
need_exit_edge = 1;
|
||||
if (e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
have_exit_edge = 1;
|
||||
}
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
{
|
||||
if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
|
||||
&& e->src != ENTRY_BLOCK_PTR)
|
||||
&& e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
need_entry_edge = 1;
|
||||
if (e->src == ENTRY_BLOCK_PTR)
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
have_entry_edge = 1;
|
||||
}
|
||||
|
||||
|
@ -1083,14 +1085,14 @@ branch_prob (void)
|
|||
if (dump_file)
|
||||
fprintf (dump_file, "Adding fake exit edge to bb %i\n",
|
||||
bb->index);
|
||||
make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
|
||||
make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
|
||||
}
|
||||
if (need_entry_edge && !have_entry_edge)
|
||||
{
|
||||
if (dump_file)
|
||||
fprintf (dump_file, "Adding fake entry edge to bb %i\n",
|
||||
bb->index);
|
||||
make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
|
||||
make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FAKE);
|
||||
/* Avoid bbs that have both fake entry edge and also some
|
||||
exit edge. One of those edges wouldn't be added to the
|
||||
spanning tree, but we can't instrument any of them. */
|
||||
|
@ -1146,7 +1148,8 @@ branch_prob (void)
|
|||
|
||||
/* Mark edges we've replaced by fake edges above as ignored. */
|
||||
if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
|
||||
&& e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
|
||||
&& e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
EDGE_INFO (e)->ignore = 1;
|
||||
ignored_edges++;
|
||||
|
@ -1213,7 +1216,8 @@ branch_prob (void)
|
|||
gcov_write_length (offset);
|
||||
|
||||
/* Arcs */
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -1257,7 +1261,7 @@ branch_prob (void)
|
|||
gimple_stmt_iterator gsi;
|
||||
gcov_position_t offset = 0;
|
||||
|
||||
if (bb == ENTRY_BLOCK_PTR->next_bb)
|
||||
if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
|
||||
{
|
||||
expanded_location curr_location =
|
||||
expand_location (DECL_SOURCE_LOCATION (current_function_decl));
|
||||
|
@ -1381,11 +1385,11 @@ find_spanning_tree (struct edge_list *el)
|
|||
basic_block bb;
|
||||
|
||||
/* We use aux field for standard union-find algorithm. */
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
|
||||
bb->aux = bb;
|
||||
|
||||
/* Add fake edge exit to entry we can't instrument. */
|
||||
union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
|
||||
union_groups (EXIT_BLOCK_PTR_FOR_FN (cfun), ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
/* First add all abnormal edges to the tree unless they form a cycle. Also
|
||||
add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
|
||||
|
@ -1394,7 +1398,7 @@ find_spanning_tree (struct edge_list *el)
|
|||
{
|
||||
edge e = INDEX_EDGE (el, i);
|
||||
if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
|
||||
|| e->dest == EXIT_BLOCK_PTR)
|
||||
|| e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
&& !EDGE_INFO (e)->ignore
|
||||
&& (find_group (e->src) != find_group (e->dest)))
|
||||
{
|
||||
|
|
|
@ -2649,7 +2649,7 @@ convert_regs_entry (void)
|
|||
Note that we are inserting converted code here. This code is
|
||||
never seen by the convert_regs pass. */
|
||||
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
{
|
||||
basic_block block = e->dest;
|
||||
block_info bi = BLOCK_INFO (block);
|
||||
|
@ -2693,7 +2693,7 @@ convert_regs_exit (void)
|
|||
value_reg_high = END_HARD_REGNO (retvalue) - 1;
|
||||
}
|
||||
|
||||
output_stack = &BLOCK_INFO (EXIT_BLOCK_PTR)->stack_in;
|
||||
output_stack = &BLOCK_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->stack_in;
|
||||
if (value_reg_low == -1)
|
||||
output_stack->top = -1;
|
||||
else
|
||||
|
@ -2847,7 +2847,7 @@ compensate_edges (void)
|
|||
starting_stack_p = false;
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
if (bb != ENTRY_BLOCK_PTR)
|
||||
if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
edge e;
|
||||
edge_iterator ei;
|
||||
|
@ -3141,14 +3141,14 @@ convert_regs (void)
|
|||
|
||||
/* Construct the desired stack for function exit. */
|
||||
convert_regs_exit ();
|
||||
BLOCK_INFO (EXIT_BLOCK_PTR)->done = 1;
|
||||
BLOCK_INFO (EXIT_BLOCK_PTR_FOR_FN (cfun))->done = 1;
|
||||
|
||||
/* ??? Future: process inner loops first, and give them arbitrary
|
||||
initial stacks which emit_swap_insn can modify. This ought to
|
||||
prevent double fxch that often appears at the head of a loop. */
|
||||
|
||||
/* Process all blocks reachable from all entry points. */
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
cfg_altered |= convert_regs_2 (e->dest);
|
||||
|
||||
/* ??? Process all unreachable blocks. Though there's no excuse
|
||||
|
@ -3221,7 +3221,7 @@ reg_to_stack (void)
|
|||
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
if (!(e->flags & EDGE_DFS_BACK)
|
||||
&& e->src != ENTRY_BLOCK_PTR)
|
||||
&& e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
bi->predecessors++;
|
||||
|
||||
/* Set current register status at last instruction `uninitialized'. */
|
||||
|
|
|
@ -137,7 +137,7 @@ extern size_t reg_info_p_size;
|
|||
frequency. */
|
||||
#define REG_FREQ_FROM_BB(bb) (optimize_size \
|
||||
|| (flag_branch_probabilities \
|
||||
&& !ENTRY_BLOCK_PTR->count) \
|
||||
&& !ENTRY_BLOCK_PTR_FOR_FN (cfun)->count) \
|
||||
? REG_FREQ_MAX \
|
||||
: ((bb)->frequency * REG_FREQ_MAX / BB_FREQ_MAX)\
|
||||
? ((bb)->frequency * REG_FREQ_MAX / BB_FREQ_MAX)\
|
||||
|
|
|
@ -1615,7 +1615,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
|
|||
&& reg_mentioned_p (XEXP (note, 0), in)
|
||||
/* Check that a former pseudo is valid; see find_dummy_reload. */
|
||||
&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
|
||||
|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
|
||||
|| (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
|
||||
ORIGINAL_REGNO (XEXP (note, 0)))
|
||||
&& hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1))
|
||||
&& ! refers_to_regno_for_reload_p (regno,
|
||||
|
@ -1939,7 +1939,7 @@ combine_reloads (void)
|
|||
&& !fixed_regs[regno]
|
||||
/* Check that a former pseudo is valid; see find_dummy_reload. */
|
||||
&& (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
|
||||
|| (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
|
||||
|| (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
|
||||
ORIGINAL_REGNO (XEXP (note, 0)))
|
||||
&& hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] == 1)))
|
||||
{
|
||||
|
@ -2098,7 +2098,7 @@ find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
|
|||
can ignore the conflict). We must never introduce writes
|
||||
to such hardregs, as they would clobber the other live
|
||||
pseudo. See PR 20973. */
|
||||
|| (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR),
|
||||
|| (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
|
||||
ORIGINAL_REGNO (in))
|
||||
/* Similarly, only do this if we can be sure that the death
|
||||
note is still valid. global can assign some hardreg to
|
||||
|
|
|
@ -617,8 +617,8 @@ has_nonexceptional_receiver (void)
|
|||
bb->flags &= ~BB_REACHABLE;
|
||||
|
||||
/* Place the exit block on our worklist. */
|
||||
EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
|
||||
*tos++ = EXIT_BLOCK_PTR;
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
|
||||
*tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
/* Iterate: find everything reachable from what we've already seen. */
|
||||
while (tos != worklist)
|
||||
|
|
|
@ -147,7 +147,7 @@ find_basic_block (rtx insn, int search_limit)
|
|||
|
||||
/* The start of the function. */
|
||||
else if (insn == 0)
|
||||
return ENTRY_BLOCK_PTR->next_bb->index;
|
||||
return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;
|
||||
|
||||
/* See if any of the upcoming CODE_LABELs start a basic block. If we reach
|
||||
anything other than a CODE_LABEL or note, we can't find this code. */
|
||||
|
@ -966,7 +966,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
|
|||
|
||||
/* Get starting and ending insn, handling the case where each might
|
||||
be a SEQUENCE. */
|
||||
start_insn = (b == ENTRY_BLOCK_PTR->next_bb->index ?
|
||||
start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
|
||||
insns : BB_HEAD (BASIC_BLOCK (b)));
|
||||
stop_insn = target;
|
||||
|
||||
|
|
|
@ -648,7 +648,7 @@ schedule_ebbs (void)
|
|||
{
|
||||
edge e;
|
||||
tail = BB_END (bb);
|
||||
if (bb->next_bb == EXIT_BLOCK_PTR
|
||||
if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| LABEL_P (BB_HEAD (bb->next_bb)))
|
||||
break;
|
||||
e = find_fallthru_edge (bb->succs);
|
||||
|
@ -683,7 +683,7 @@ ebb_add_block (basic_block bb, basic_block after)
|
|||
/* Recovery blocks are always bounded by BARRIERS,
|
||||
therefore, they always form single block EBB,
|
||||
therefore, we can use rec->index to identify such EBBs. */
|
||||
if (after == EXIT_BLOCK_PTR)
|
||||
if (after == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_set_bit (&dont_calc_deps, bb->index);
|
||||
else if (after == last_bb)
|
||||
last_bb = bb;
|
||||
|
|
|
@ -945,14 +945,15 @@ extern vec<haifa_deps_insn_data_def> h_d_i_d;
|
|||
/* INSN is a speculation check that will simply reexecute the speculatively
|
||||
scheduled instruction if the speculation fails. */
|
||||
#define IS_SPECULATION_SIMPLE_CHECK_P(INSN) \
|
||||
(RECOVERY_BLOCK (INSN) == EXIT_BLOCK_PTR)
|
||||
(RECOVERY_BLOCK (INSN) == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
|
||||
/* INSN is a speculation check that will branch to RECOVERY_BLOCK if the
|
||||
speculation fails. Insns in that block will reexecute the speculatively
|
||||
scheduled code and then will return immediately after INSN thus preserving
|
||||
semantics of the program. */
|
||||
#define IS_SPECULATION_BRANCHY_CHECK_P(INSN) \
|
||||
(RECOVERY_BLOCK (INSN) != NULL && RECOVERY_BLOCK (INSN) != EXIT_BLOCK_PTR)
|
||||
(RECOVERY_BLOCK (INSN) != NULL \
|
||||
&& RECOVERY_BLOCK (INSN) != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
|
||||
|
||||
/* Dep status (aka ds_t) of the link encapsulates all information for a given
|
||||
|
|
|
@ -495,7 +495,7 @@ find_single_block_region (bool ebbs_p)
|
|||
BLOCK_TO_BB (bb->index) = i - RGN_BLOCKS (nr_regions);
|
||||
i++;
|
||||
|
||||
if (bb->next_bb == EXIT_BLOCK_PTR
|
||||
if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| LABEL_P (BB_HEAD (bb->next_bb)))
|
||||
break;
|
||||
|
||||
|
@ -665,7 +665,7 @@ haifa_find_rgns (void)
|
|||
|
||||
/* DFS traversal to find inner loops in the cfg. */
|
||||
|
||||
current_edge = ei_start (single_succ (ENTRY_BLOCK_PTR)->succs);
|
||||
current_edge = ei_start (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->succs);
|
||||
sp = -1;
|
||||
|
||||
while (1)
|
||||
|
@ -840,7 +840,7 @@ haifa_find_rgns (void)
|
|||
/* If we exited the loop early, then I is the header of
|
||||
a non-reducible loop and we should quit processing it
|
||||
now. */
|
||||
if (jbb != EXIT_BLOCK_PTR)
|
||||
if (jbb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
/* I is a header of an inner loop, or block 0 in a subroutine
|
||||
|
@ -858,7 +858,7 @@ haifa_find_rgns (void)
|
|||
/* Decrease degree of all I's successors for topological
|
||||
ordering. */
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR)
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
--degree[e->dest->index];
|
||||
|
||||
/* Estimate # insns, and count # blocks in the region. */
|
||||
|
@ -875,7 +875,7 @@ haifa_find_rgns (void)
|
|||
/* Leaf nodes have only a single successor which must
|
||||
be EXIT_BLOCK. */
|
||||
if (single_succ_p (jbb)
|
||||
&& single_succ (jbb) == EXIT_BLOCK_PTR)
|
||||
&& single_succ (jbb) == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
queue[++tail] = jbb->index;
|
||||
bitmap_set_bit (in_queue, jbb->index);
|
||||
|
@ -893,7 +893,7 @@ haifa_find_rgns (void)
|
|||
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
{
|
||||
if (e->src == ENTRY_BLOCK_PTR)
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
node = e->src->index;
|
||||
|
@ -954,7 +954,7 @@ haifa_find_rgns (void)
|
|||
|
||||
/* See discussion above about nodes not marked as in
|
||||
this loop during the initial DFS traversal. */
|
||||
if (e->src == ENTRY_BLOCK_PTR
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| max_hdr[node] != loop_head)
|
||||
{
|
||||
tail = -1;
|
||||
|
@ -1006,7 +1006,7 @@ haifa_find_rgns (void)
|
|||
queue[head] = queue[tail--];
|
||||
|
||||
FOR_EACH_EDGE (e, ei, BASIC_BLOCK (child)->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR)
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
--degree[e->dest->index];
|
||||
}
|
||||
else
|
||||
|
@ -1026,7 +1026,7 @@ haifa_find_rgns (void)
|
|||
This may provide several smaller regions instead
|
||||
of one too_large region. */
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR)
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_set_bit (extended_rgn_header, e->dest->index);
|
||||
}
|
||||
}
|
||||
|
@ -1305,7 +1305,7 @@ extend_rgns (int *degree, int *idxp, sbitmap header, int *loop_hdr)
|
|||
BLOCK_TO_BB (bbn) = 0;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, BASIC_BLOCK (bbn)->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR)
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
degree[e->dest->index]--;
|
||||
|
||||
if (!large)
|
||||
|
@ -1362,7 +1362,7 @@ extend_rgns (int *degree, int *idxp, sbitmap header, int *loop_hdr)
|
|||
idx++;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, BASIC_BLOCK (succn)->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR)
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
degree[e->dest->index]--;
|
||||
}
|
||||
}
|
||||
|
@ -1426,7 +1426,7 @@ compute_dom_prob_ps (int bb)
|
|||
edge out_edge;
|
||||
edge_iterator out_ei;
|
||||
|
||||
if (in_edge->src == ENTRY_BLOCK_PTR)
|
||||
if (in_edge->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
pred_bb = BLOCK_TO_BB (in_edge->src->index);
|
||||
|
@ -2663,7 +2663,7 @@ propagate_deps (int bb, struct deps_desc *pred_deps)
|
|||
FOR_EACH_EDGE (e, ei, block->succs)
|
||||
{
|
||||
/* Only bbs "below" bb, in the same region, are interesting. */
|
||||
if (e->dest == EXIT_BLOCK_PTR
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| CONTAINING_RGN (block->index) != CONTAINING_RGN (e->dest->index)
|
||||
|| BLOCK_TO_BB (e->dest->index) <= bb)
|
||||
continue;
|
||||
|
@ -3454,10 +3454,11 @@ rgn_add_block (basic_block bb, basic_block after)
|
|||
extend_regions ();
|
||||
bitmap_set_bit (¬_in_df, bb->index);
|
||||
|
||||
if (after == 0 || after == EXIT_BLOCK_PTR)
|
||||
if (after == 0 || after == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
rgn_make_new_region_out_of_new_block (bb);
|
||||
RGN_DONT_CALC_DEPS (nr_regions - 1) = (after == EXIT_BLOCK_PTR);
|
||||
RGN_DONT_CALC_DEPS (nr_regions - 1) = (after
|
||||
== EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
}
|
||||
else
|
||||
{
|
||||
|
|
|
@ -3682,7 +3682,7 @@ maybe_tidy_empty_bb (basic_block bb)
|
|||
successors. Otherwise remove it. */
|
||||
if (!sel_bb_empty_p (bb)
|
||||
|| (single_succ_p (bb)
|
||||
&& single_succ (bb) == EXIT_BLOCK_PTR
|
||||
&& single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& (!single_pred_p (bb)
|
||||
|| !(single_pred_edge (bb)->flags & EDGE_FALLTHRU)))
|
||||
|| EDGE_COUNT (bb->preds) == 0
|
||||
|
@ -3853,7 +3853,7 @@ tidy_control_flow (basic_block xbb, bool full_tidying)
|
|||
&& EDGE_COUNT (xbb->succs) == 1
|
||||
&& (EDGE_SUCC (xbb, 0)->flags & EDGE_FALLTHRU)
|
||||
/* When successor is an EXIT block, it may not be the next block. */
|
||||
&& single_succ (xbb) != EXIT_BLOCK_PTR
|
||||
&& single_succ (xbb) != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
/* And unconditional jump in previous basic block leads to
|
||||
next basic block of XBB and this jump can be safely removed. */
|
||||
&& in_current_region_p (xbb->prev_bb)
|
||||
|
@ -4325,7 +4325,7 @@ init_lv_sets (void)
|
|||
init_lv_set (bb);
|
||||
|
||||
/* Don't forget EXIT_BLOCK. */
|
||||
init_lv_set (EXIT_BLOCK_PTR);
|
||||
init_lv_set (EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
}
|
||||
|
||||
/* Release lv set of HEAD. */
|
||||
|
@ -4346,7 +4346,7 @@ free_lv_sets (void)
|
|||
basic_block bb;
|
||||
|
||||
/* Don't forget EXIT_BLOCK. */
|
||||
free_lv_set (EXIT_BLOCK_PTR);
|
||||
free_lv_set (EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
/* Free LV sets. */
|
||||
FOR_EACH_BB (bb)
|
||||
|
@ -4524,7 +4524,7 @@ sel_bb_head (basic_block bb)
|
|||
{
|
||||
insn_t head;
|
||||
|
||||
if (bb == EXIT_BLOCK_PTR)
|
||||
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
gcc_assert (exit_insn != NULL_RTX);
|
||||
head = exit_insn;
|
||||
|
@ -4557,7 +4557,7 @@ sel_bb_end (basic_block bb)
|
|||
if (sel_bb_empty_p (bb))
|
||||
return NULL_RTX;
|
||||
|
||||
gcc_assert (bb != EXIT_BLOCK_PTR);
|
||||
gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
return BB_END (bb);
|
||||
}
|
||||
|
@ -4852,7 +4852,7 @@ bb_ends_ebb_p (basic_block bb)
|
|||
basic_block next_bb = bb_next_bb (bb);
|
||||
edge e;
|
||||
|
||||
if (next_bb == EXIT_BLOCK_PTR
|
||||
if (next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| bitmap_bit_p (forced_ebb_heads, next_bb->index)
|
||||
|| (LABEL_P (BB_HEAD (next_bb))
|
||||
/* NB: LABEL_NUSES () is not maintained outside of jump.c.
|
||||
|
@ -5538,7 +5538,7 @@ sel_create_recovery_block (insn_t orig_insn)
|
|||
|
||||
recovery_block = sched_create_recovery_block (&before_recovery);
|
||||
if (before_recovery)
|
||||
copy_lv_set_from (before_recovery, EXIT_BLOCK_PTR);
|
||||
copy_lv_set_from (before_recovery, EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
gcc_assert (sel_bb_empty_p (recovery_block));
|
||||
sched_create_recovery_edges (first_bb, recovery_block, second_bb);
|
||||
|
@ -5821,7 +5821,7 @@ setup_nop_and_exit_insns (void)
|
|||
emit_insn (nop_pattern);
|
||||
exit_insn = get_insns ();
|
||||
end_sequence ();
|
||||
set_block_for_insn (exit_insn, EXIT_BLOCK_PTR);
|
||||
set_block_for_insn (exit_insn, EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
}
|
||||
|
||||
/* Free special insns used in the scheduler. */
|
||||
|
@ -6396,7 +6396,7 @@ sel_remove_loop_preheader (void)
|
|||
If it is so - delete this jump and clear data sets of its
|
||||
basic block if it becomes empty. */
|
||||
if (next_bb->prev_bb == prev_bb
|
||||
&& prev_bb != ENTRY_BLOCK_PTR
|
||||
&& prev_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& bb_has_removable_jump_to_p (prev_bb, next_bb))
|
||||
{
|
||||
redirect_edge_and_branch (EDGE_SUCC (prev_bb, 0), next_bb);
|
||||
|
|
|
@ -1024,7 +1024,7 @@ inner_loop_header_p (basic_block bb)
|
|||
if (!current_loop_nest)
|
||||
return false;
|
||||
|
||||
if (bb == EXIT_BLOCK_PTR)
|
||||
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return false;
|
||||
|
||||
inner_loop = bb->loop_father;
|
||||
|
@ -1050,7 +1050,7 @@ get_loop_exit_edges_unique_dests (const struct loop *loop)
|
|||
vec<edge> edges = vNULL;
|
||||
struct loop_exit *exit;
|
||||
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& current_loops->state & LOOPS_HAVE_RECORDED_EXITS);
|
||||
|
||||
for (exit = loop->exits->next; exit->e; exit = exit->next)
|
||||
|
@ -1083,7 +1083,7 @@ sel_bb_empty_or_nop_p (basic_block bb)
|
|||
if (!INSN_NOP_P (first))
|
||||
return false;
|
||||
|
||||
if (bb == EXIT_BLOCK_PTR)
|
||||
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return false;
|
||||
|
||||
last = sel_bb_end (bb);
|
||||
|
@ -1204,7 +1204,7 @@ _succ_iter_start (insn_t *succp, insn_t insn, int flags)
|
|||
i.current_exit = -1;
|
||||
i.loop_exits.create (0);
|
||||
|
||||
if (bb != EXIT_BLOCK_PTR && BB_END (bb) != insn)
|
||||
if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun) && BB_END (bb) != insn)
|
||||
{
|
||||
i.bb_end = false;
|
||||
|
||||
|
@ -1308,7 +1308,7 @@ _succ_iter_cond (succ_iterator *ip, rtx *succp, rtx insn,
|
|||
{
|
||||
basic_block bb = ip->e2->dest;
|
||||
|
||||
if (bb == EXIT_BLOCK_PTR || bb == after_recovery)
|
||||
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == after_recovery)
|
||||
*succp = exit_insn;
|
||||
else
|
||||
{
|
||||
|
|
|
@ -4551,7 +4551,8 @@ find_block_for_bookkeeping (edge e1, edge e2, bool lax)
|
|||
edge e;
|
||||
|
||||
/* Loop over edges from E1 to E2, inclusive. */
|
||||
for (e = e1; !lax || e->dest != EXIT_BLOCK_PTR; e = EDGE_SUCC (e->dest, 0))
|
||||
for (e = e1; !lax || e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun); e =
|
||||
EDGE_SUCC (e->dest, 0))
|
||||
{
|
||||
if (EDGE_COUNT (e->dest->preds) == 2)
|
||||
{
|
||||
|
@ -4642,7 +4643,7 @@ create_block_for_bookkeeping (edge e1, edge e2)
|
|||
if (DEBUG_INSN_P (insn)
|
||||
&& single_succ_p (new_bb)
|
||||
&& (succ = single_succ (new_bb))
|
||||
&& succ != EXIT_BLOCK_PTR
|
||||
&& succ != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& DEBUG_INSN_P ((last = sel_bb_end (new_bb))))
|
||||
{
|
||||
while (insn != last && (DEBUG_INSN_P (insn) || NOTE_P (insn)))
|
||||
|
|
|
@ -805,7 +805,7 @@ insert_store (struct st_expr * expr, edge e)
|
|||
|
||||
/* If tmp is NULL, we found an insertion on every edge, blank the
|
||||
insertion vector for these edges, and insert at the start of the BB. */
|
||||
if (!tmp && bb != EXIT_BLOCK_PTR)
|
||||
if (!tmp && bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
FOR_EACH_EDGE (tmp, ei, e->dest->preds)
|
||||
{
|
||||
|
@ -869,7 +869,7 @@ remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
|
|||
}
|
||||
bb = act->dest;
|
||||
|
||||
if (bb == EXIT_BLOCK_PTR
|
||||
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
|| bitmap_bit_p (visited, bb->index))
|
||||
{
|
||||
if (!ei_end_p (ei))
|
||||
|
|
|
@ -1950,7 +1950,7 @@ tm_region_init (struct tm_region *region)
|
|||
vec<tm_region_p> bb_regions = vNULL;
|
||||
|
||||
all_tm_regions = region;
|
||||
bb = single_succ (ENTRY_BLOCK_PTR);
|
||||
bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
/* We could store this information in bb->aux, but we may get called
|
||||
through get_all_tm_blocks() from another pass that may be already
|
||||
|
@ -2016,7 +2016,7 @@ gate_tm_init (void)
|
|||
struct tm_region *region = (struct tm_region *)
|
||||
obstack_alloc (&tm_obstack.obstack, sizeof (struct tm_region));
|
||||
memset (region, 0, sizeof (*region));
|
||||
region->entry_block = single_succ (ENTRY_BLOCK_PTR);
|
||||
region->entry_block = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
/* For a clone, the entire function is the region. But even if
|
||||
we don't need to record any exit blocks, we may need to
|
||||
record irrevocable blocks. */
|
||||
|
@ -3633,7 +3633,8 @@ tm_memopt_compute_available (struct tm_region *region,
|
|||
/* If the out state of this block changed, then we need to add
|
||||
its successors to the worklist if they are not already in. */
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (!AVAIL_IN_WORKLIST_P (e->dest) && e->dest != EXIT_BLOCK_PTR)
|
||||
if (!AVAIL_IN_WORKLIST_P (e->dest)
|
||||
&& e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
*qin++ = e->dest;
|
||||
AVAIL_IN_WORKLIST_P (e->dest) = true;
|
||||
|
@ -4539,12 +4540,14 @@ ipa_tm_scan_irr_function (struct cgraph_node *node, bool for_clone)
|
|||
if (for_clone)
|
||||
{
|
||||
old_irr = d->irrevocable_blocks_clone;
|
||||
queue.quick_push (single_succ (ENTRY_BLOCK_PTR));
|
||||
queue.quick_push (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
if (ipa_tm_scan_irr_blocks (&queue, new_irr, old_irr, NULL))
|
||||
{
|
||||
ipa_tm_propagate_irr (single_succ (ENTRY_BLOCK_PTR), new_irr,
|
||||
ipa_tm_propagate_irr (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
|
||||
new_irr,
|
||||
old_irr, NULL);
|
||||
ret = bitmap_bit_p (new_irr, single_succ (ENTRY_BLOCK_PTR)->index);
|
||||
ret = bitmap_bit_p (new_irr,
|
||||
single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun))->index);
|
||||
}
|
||||
}
|
||||
else
|
||||
|
@ -5294,7 +5297,8 @@ ipa_tm_transform_clone (struct cgraph_node *node)
|
|||
calculate_dominance_info (CDI_DOMINATORS);
|
||||
|
||||
need_ssa_rename =
|
||||
ipa_tm_transform_calls (d->clone, NULL, single_succ (ENTRY_BLOCK_PTR),
|
||||
ipa_tm_transform_calls (d->clone, NULL,
|
||||
single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
|
||||
d->irrevocable_blocks_clone);
|
||||
|
||||
if (need_ssa_rename)
|
||||
|
|
|
@ -190,14 +190,14 @@ init_empty_tree_cfg_for_function (struct function *fn)
|
|||
initial_cfg_capacity);
|
||||
|
||||
SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
|
||||
ENTRY_BLOCK_PTR_FOR_FUNCTION (fn));
|
||||
ENTRY_BLOCK_PTR_FOR_FN (fn));
|
||||
SET_BASIC_BLOCK_FOR_FUNCTION (fn, EXIT_BLOCK,
|
||||
EXIT_BLOCK_PTR_FOR_FUNCTION (fn));
|
||||
EXIT_BLOCK_PTR_FOR_FN (fn));
|
||||
|
||||
ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->next_bb
|
||||
= EXIT_BLOCK_PTR_FOR_FUNCTION (fn);
|
||||
EXIT_BLOCK_PTR_FOR_FUNCTION (fn)->prev_bb
|
||||
= ENTRY_BLOCK_PTR_FOR_FUNCTION (fn);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
|
||||
= EXIT_BLOCK_PTR_FOR_FN (fn);
|
||||
EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
|
||||
= ENTRY_BLOCK_PTR_FOR_FN (fn);
|
||||
}
|
||||
|
||||
void
|
||||
|
@ -236,7 +236,7 @@ build_gimple_cfg (gimple_seq seq)
|
|||
|
||||
/* Make sure there is always at least one block, even if it's empty. */
|
||||
if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
|
||||
create_empty_bb (ENTRY_BLOCK_PTR);
|
||||
create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
/* Adjust the size of the array. */
|
||||
if (basic_block_info->length () < (size_t) n_basic_blocks_for_fn (cfun))
|
||||
|
@ -518,7 +518,7 @@ make_blocks (gimple_seq seq)
|
|||
gimple stmt = NULL;
|
||||
bool start_new_block = true;
|
||||
bool first_stmt_of_seq = true;
|
||||
basic_block bb = ENTRY_BLOCK_PTR;
|
||||
basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
while (!gsi_end_p (i))
|
||||
{
|
||||
|
@ -669,7 +669,8 @@ make_edges (void)
|
|||
|
||||
/* Create an edge from entry to the first block with executable
|
||||
statements in it. */
|
||||
make_edge (ENTRY_BLOCK_PTR, BASIC_BLOCK (NUM_FIXED_BLOCKS), EDGE_FALLTHRU);
|
||||
make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), BASIC_BLOCK (NUM_FIXED_BLOCKS),
|
||||
EDGE_FALLTHRU);
|
||||
|
||||
/* Traverse the basic block array placing edges. */
|
||||
FOR_EACH_BB (bb)
|
||||
|
@ -687,7 +688,7 @@ make_edges (void)
|
|||
fallthru = false;
|
||||
break;
|
||||
case GIMPLE_RETURN:
|
||||
make_edge (bb, EXIT_BLOCK_PTR, 0);
|
||||
make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
|
||||
fallthru = false;
|
||||
break;
|
||||
case GIMPLE_COND:
|
||||
|
@ -719,7 +720,8 @@ make_edges (void)
|
|||
|
||||
/* BUILTIN_RETURN is really a return statement. */
|
||||
if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
|
||||
make_edge (bb, EXIT_BLOCK_PTR, 0), fallthru = false;
|
||||
make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0), fallthru =
|
||||
false;
|
||||
/* Some calls are known not to return. */
|
||||
else
|
||||
fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
|
||||
|
@ -1503,7 +1505,7 @@ gimple_can_merge_blocks_p (basic_block a, basic_block b)
|
|||
if (!single_pred_p (b))
|
||||
return false;
|
||||
|
||||
if (b == EXIT_BLOCK_PTR)
|
||||
if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return false;
|
||||
|
||||
/* If A ends by a statement causing exceptions or something similar, we
|
||||
|
@ -4849,19 +4851,21 @@ gimple_verify_flow_info (void)
|
|||
edge e;
|
||||
edge_iterator ei;
|
||||
|
||||
if (ENTRY_BLOCK_PTR->il.gimple.seq || ENTRY_BLOCK_PTR->il.gimple.phi_nodes)
|
||||
if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
|
||||
|| ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
|
||||
{
|
||||
error ("ENTRY_BLOCK has IL associated with it");
|
||||
err = 1;
|
||||
}
|
||||
|
||||
if (EXIT_BLOCK_PTR->il.gimple.seq || EXIT_BLOCK_PTR->il.gimple.phi_nodes)
|
||||
if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
|
||||
|| EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
|
||||
{
|
||||
error ("EXIT_BLOCK has IL associated with it");
|
||||
err = 1;
|
||||
}
|
||||
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
if (e->flags & EDGE_FALLTHRU)
|
||||
{
|
||||
error ("fallthru to exit from bb %d", e->src->index);
|
||||
|
@ -5041,7 +5045,7 @@ gimple_verify_flow_info (void)
|
|||
error ("wrong outgoing edge flags at end of bb %d", bb->index);
|
||||
err = 1;
|
||||
}
|
||||
if (single_succ (bb) != EXIT_BLOCK_PTR)
|
||||
if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
error ("return edge does not point to exit in bb %d",
|
||||
bb->index);
|
||||
|
@ -5281,7 +5285,7 @@ gimple_redirect_edge_and_branch (edge e, basic_block dest)
|
|||
if (e->flags & EDGE_EH)
|
||||
return redirect_eh_edge (e, dest);
|
||||
|
||||
if (e->src != ENTRY_BLOCK_PTR)
|
||||
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
ret = gimple_try_redirect_by_replacing_jump (e, dest);
|
||||
if (ret)
|
||||
|
@ -5564,7 +5568,7 @@ gimple_duplicate_bb (basic_block bb)
|
|||
gimple_seq phis = phi_nodes (bb);
|
||||
gimple phi, stmt, copy;
|
||||
|
||||
new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
|
||||
new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
|
||||
|
||||
/* Copy the PHI nodes. We ignore PHI node arguments here because
|
||||
the incoming edges have not been setup yet. */
|
||||
|
@ -6901,9 +6905,9 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
|
|||
FIXME, this is silly. The CFG ought to become a parameter to
|
||||
these helpers. */
|
||||
push_cfun (dest_cfun);
|
||||
make_edge (ENTRY_BLOCK_PTR, entry_bb, EDGE_FALLTHRU);
|
||||
make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
|
||||
if (exit_bb)
|
||||
make_edge (exit_bb, EXIT_BLOCK_PTR, 0);
|
||||
make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
|
||||
pop_cfun ();
|
||||
|
||||
/* Back in the original function, the SESE region has disappeared,
|
||||
|
@ -7247,7 +7251,7 @@ print_loops (FILE *file, int verbosity)
|
|||
{
|
||||
basic_block bb;
|
||||
|
||||
bb = ENTRY_BLOCK_PTR;
|
||||
bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
if (bb && bb->loop_father)
|
||||
print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
|
||||
}
|
||||
|
@ -7416,7 +7420,8 @@ gimple_flow_call_edges_add (sbitmap blocks)
|
|||
if (! blocks)
|
||||
check_last_block = true;
|
||||
else
|
||||
check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
|
||||
check_last_block = bitmap_bit_p (blocks,
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
|
||||
|
||||
/* In the last basic block, before epilogue generation, there will be
|
||||
a fallthru edge to EXIT. Special care is required if the last insn
|
||||
|
@ -7432,7 +7437,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
|
|||
Handle this by adding a dummy instruction in a new last basic block. */
|
||||
if (check_last_block)
|
||||
{
|
||||
basic_block bb = EXIT_BLOCK_PTR->prev_bb;
|
||||
basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
|
||||
gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
|
||||
gimple t = NULL;
|
||||
|
||||
|
@ -7443,7 +7448,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
|
|||
{
|
||||
edge e;
|
||||
|
||||
e = find_edge (bb, EXIT_BLOCK_PTR);
|
||||
e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
if (e)
|
||||
{
|
||||
gsi_insert_on_edge (e, gimple_build_nop ());
|
||||
|
@ -7486,7 +7491,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
|
|||
#ifdef ENABLE_CHECKING
|
||||
if (stmt == last_stmt)
|
||||
{
|
||||
e = find_edge (bb, EXIT_BLOCK_PTR);
|
||||
e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
gcc_assert (e == NULL);
|
||||
}
|
||||
#endif
|
||||
|
@ -7499,7 +7504,7 @@ gimple_flow_call_edges_add (sbitmap blocks)
|
|||
if (e)
|
||||
blocks_split++;
|
||||
}
|
||||
make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
|
||||
make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
|
||||
}
|
||||
gsi_prev (&gsi);
|
||||
}
|
||||
|
@ -7537,7 +7542,7 @@ remove_edge_and_dominated_blocks (edge e)
|
|||
}
|
||||
|
||||
/* No updating is needed for edges to exit. */
|
||||
if (e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
if (cfgcleanup_altered_bbs)
|
||||
bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
|
||||
|
@ -7577,7 +7582,7 @@ remove_edge_and_dominated_blocks (edge e)
|
|||
{
|
||||
FOR_EACH_EDGE (f, ei, bb->succs)
|
||||
{
|
||||
if (f->dest != EXIT_BLOCK_PTR)
|
||||
if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_set_bit (df, f->dest->index);
|
||||
}
|
||||
}
|
||||
|
@ -7928,8 +7933,8 @@ split_critical_edges (void)
|
|||
gimple_find_edge_insert_loc. */
|
||||
else if ((!single_pred_p (e->dest)
|
||||
|| !gimple_seq_empty_p (phi_nodes (e->dest))
|
||||
|| e->dest == EXIT_BLOCK_PTR)
|
||||
&& e->src != ENTRY_BLOCK_PTR
|
||||
|| e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
&& e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& !(e->flags & EDGE_ABNORMAL))
|
||||
{
|
||||
gimple_stmt_iterator gsi;
|
||||
|
@ -8053,10 +8058,10 @@ execute_warn_function_return (void)
|
|||
|
||||
/* If we have a path to EXIT, then we do return. */
|
||||
if (TREE_THIS_VOLATILE (cfun->decl)
|
||||
&& EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0)
|
||||
&& EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0)
|
||||
{
|
||||
location = UNKNOWN_LOCATION;
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
{
|
||||
last = last_stmt (e->src);
|
||||
if ((gimple_code (last) == GIMPLE_RETURN
|
||||
|
@ -8073,10 +8078,10 @@ execute_warn_function_return (void)
|
|||
without returning a value. */
|
||||
else if (warn_return_type
|
||||
&& !TREE_NO_WARNING (cfun->decl)
|
||||
&& EDGE_COUNT (EXIT_BLOCK_PTR->preds) > 0
|
||||
&& EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0
|
||||
&& !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
|
||||
{
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
|
||||
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
|
||||
{
|
||||
gimple last = last_stmt (e->src);
|
||||
if (gimple_code (last) == GIMPLE_RETURN
|
||||
|
@ -8293,13 +8298,15 @@ execute_fixup_cfg (void)
|
|||
|
||||
count_scale
|
||||
= GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
|
||||
ENTRY_BLOCK_PTR->count);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
|
||||
|
||||
ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
|
||||
EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
|
||||
cgraph_get_node (current_function_decl)->count;
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
|
||||
apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
|
||||
count_scale);
|
||||
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
|
||||
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
|
||||
e->count = apply_scale (e->count, count_scale);
|
||||
|
||||
FOR_EACH_BB (bb)
|
||||
|
|
|
@ -251,14 +251,14 @@ tree_forwarder_block_p (basic_block bb, bool phi_wanted)
|
|||
Otherwise, BB must have PHI nodes. */
|
||||
|| gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
|
||||
/* BB may not be a predecessor of EXIT_BLOCK_PTR. */
|
||||
|| single_succ (bb) == EXIT_BLOCK_PTR
|
||||
|| single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
/* Nor should this be an infinite loop. */
|
||||
|| single_succ (bb) == bb
|
||||
/* BB may not have an abnormal outgoing edge. */
|
||||
|| (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
|
||||
return false;
|
||||
|
||||
gcc_checking_assert (bb != ENTRY_BLOCK_PTR);
|
||||
gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
locus = single_succ_edge (bb)->goto_locus;
|
||||
|
||||
|
@ -268,7 +268,7 @@ tree_forwarder_block_p (basic_block bb, bool phi_wanted)
|
|||
edge e;
|
||||
|
||||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
if (e->src == ENTRY_BLOCK_PTR || (e->flags & EDGE_EH))
|
||||
if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
|
||||
return false;
|
||||
/* If goto_locus of any of the edges differs, prevent removing
|
||||
the forwarder block for -O0. */
|
||||
|
|
|
@ -690,7 +690,7 @@ update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
|
|||
static void
|
||||
update_parameter_components (void)
|
||||
{
|
||||
edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR);
|
||||
edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
tree parm;
|
||||
|
||||
for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
|
||||
|
|
|
@ -918,7 +918,7 @@ get_loop_body_in_if_conv_order (const struct loop *loop)
|
|||
unsigned int visited_count = 0;
|
||||
|
||||
gcc_assert (loop->num_nodes);
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR);
|
||||
gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
blocks = XCNEWVEC (basic_block, loop->num_nodes);
|
||||
visited = BITMAP_ALLOC (NULL);
|
||||
|
|
|
@ -199,7 +199,7 @@ remap_ssa_name (tree name, copy_body_data *id)
|
|||
if (SSA_NAME_IS_DEFAULT_DEF (name)
|
||||
&& TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
|
||||
&& id->entry_bb == NULL
|
||||
&& single_succ_p (ENTRY_BLOCK_PTR))
|
||||
&& single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
|
||||
{
|
||||
tree vexpr = make_node (DEBUG_EXPR_DECL);
|
||||
gimple def_temp;
|
||||
|
@ -218,7 +218,7 @@ remap_ssa_name (tree name, copy_body_data *id)
|
|||
DECL_ARTIFICIAL (vexpr) = 1;
|
||||
TREE_TYPE (vexpr) = TREE_TYPE (name);
|
||||
DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
|
||||
gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
|
||||
gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
|
||||
return vexpr;
|
||||
}
|
||||
|
@ -300,7 +300,8 @@ remap_ssa_name (tree name, copy_body_data *id)
|
|||
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
|
||||
&& (!SSA_NAME_VAR (name)
|
||||
|| TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
|
||||
&& (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
|
||||
&& (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
0)->dest
|
||||
|| EDGE_COUNT (id->entry_bb->preds) != 1))
|
||||
{
|
||||
gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
|
||||
|
@ -1978,7 +1979,7 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
|
|||
|
||||
/* Return edges do get a FALLTHRU flag when the get inlined. */
|
||||
if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
|
||||
&& old_edge->dest->aux != EXIT_BLOCK_PTR)
|
||||
&& old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
flags |= EDGE_FALLTHRU;
|
||||
new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
|
||||
new_edge->count = apply_scale (old_edge->count, count_scale);
|
||||
|
@ -2163,10 +2164,10 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
|
|||
if (!DECL_RESULT (new_fndecl))
|
||||
DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
|
||||
|
||||
if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
|
||||
if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
|
||||
count_scale
|
||||
= GCOV_COMPUTE_SCALE (count,
|
||||
ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
|
||||
else
|
||||
count_scale = REG_BR_PROB_BASE;
|
||||
|
||||
|
@ -2202,16 +2203,16 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
|
|||
init_empty_tree_cfg ();
|
||||
|
||||
profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
|
||||
ENTRY_BLOCK_PTR->count =
|
||||
(ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
|
||||
(ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
|
||||
REG_BR_PROB_BASE);
|
||||
ENTRY_BLOCK_PTR->frequency
|
||||
= ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
|
||||
EXIT_BLOCK_PTR->count =
|
||||
(EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
|
||||
= ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
|
||||
(EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
|
||||
REG_BR_PROB_BASE);
|
||||
EXIT_BLOCK_PTR->frequency =
|
||||
EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
|
||||
EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
|
||||
if (src_cfun->eh)
|
||||
init_eh_for_function ();
|
||||
|
||||
|
@ -2410,7 +2411,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
|
|||
before inlining, using the guessed edge frequencies, so that we don't
|
||||
end up with a 0-count inline body which can confuse downstream
|
||||
optimizations such as function splitting. */
|
||||
if (!ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count && count)
|
||||
if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
|
||||
{
|
||||
/* Apply the larger of the call bb count and the total incoming
|
||||
call edge count to the callee. */
|
||||
|
@ -2422,10 +2423,10 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
|
|||
freqs_to_counts (id->src_node, count > in_count ? count : in_count);
|
||||
}
|
||||
|
||||
if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
|
||||
if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
|
||||
count_scale
|
||||
= GCOV_COMPUTE_SCALE (count,
|
||||
ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
|
||||
else
|
||||
count_scale = REG_BR_PROB_BASE;
|
||||
|
||||
|
@ -2450,20 +2451,20 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
|
|||
incoming_count = apply_scale (incoming_count, count_scale);
|
||||
incoming_frequency
|
||||
= apply_scale ((gcov_type)incoming_frequency, frequency_scale);
|
||||
ENTRY_BLOCK_PTR->count = incoming_count;
|
||||
ENTRY_BLOCK_PTR->frequency = incoming_frequency;
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
|
||||
}
|
||||
|
||||
/* Must have a CFG here at this point. */
|
||||
gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
|
||||
gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
|
||||
(DECL_STRUCT_FUNCTION (callee_fndecl)));
|
||||
|
||||
cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
|
||||
|
||||
ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
|
||||
EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
|
||||
entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
|
||||
exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
|
||||
entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
|
||||
exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
|
||||
|
||||
/* Duplicate any exception-handling regions. */
|
||||
if (cfun->eh)
|
||||
|
@ -2694,7 +2695,7 @@ copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
|
|||
tree body;
|
||||
|
||||
/* If this body has a CFG, walk CFG and copy. */
|
||||
gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
|
||||
gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
|
||||
body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
|
||||
new_entry);
|
||||
copy_debug_stmts (id);
|
||||
|
@ -5098,7 +5099,8 @@ delete_unreachable_blocks_update_callgraph (copy_body_data *id)
|
|||
|
||||
/* Delete all unreachable basic blocks. */
|
||||
|
||||
for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
|
||||
for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
|
||||
!= EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
|
||||
{
|
||||
next_bb = b->next_bb;
|
||||
|
||||
|
@ -5294,7 +5296,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
|
|||
id.transform_parameter = false;
|
||||
id.transform_lang_insert_block = NULL;
|
||||
|
||||
old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
|
||||
old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
|
||||
(DECL_STRUCT_FUNCTION (old_decl));
|
||||
DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
|
||||
DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
|
||||
|
@ -5413,7 +5415,8 @@ tree_function_versioning (tree old_decl, tree new_decl,
|
|||
|
||||
/* Copy the Function's body. */
|
||||
copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
|
||||
ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, new_entry);
|
||||
ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
|
||||
new_entry);
|
||||
|
||||
/* Renumber the lexical scoping (non-code) blocks consecutively. */
|
||||
number_blocks (new_decl);
|
||||
|
@ -5421,7 +5424,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
|
|||
/* We want to create the BB unconditionally, so that the addition of
|
||||
debug stmts doesn't affect BB count, which may in the end cause
|
||||
codegen differences. */
|
||||
bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
|
||||
bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
while (init_stmts.length ())
|
||||
insert_init_stmt (&id, bb, init_stmts.pop ());
|
||||
update_clone_info (&id);
|
||||
|
@ -5458,7 +5461,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
|
|||
struct cgraph_edge *e;
|
||||
rebuild_frequencies ();
|
||||
|
||||
new_version_node->count = ENTRY_BLOCK_PTR->count;
|
||||
new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
|
||||
for (e = new_version_node->callees; e; e = e->next_callee)
|
||||
{
|
||||
basic_block bb = gimple_bb (e->call_stmt);
|
||||
|
|
|
@ -1221,10 +1221,12 @@ rewrite_debug_stmt_uses (gimple stmt)
|
|||
def = info->current_def;
|
||||
if (!def)
|
||||
{
|
||||
if (TREE_CODE (var) == PARM_DECL && single_succ_p (ENTRY_BLOCK_PTR))
|
||||
if (TREE_CODE (var) == PARM_DECL
|
||||
&& single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
|
||||
{
|
||||
gimple_stmt_iterator gsi
|
||||
= gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
|
||||
=
|
||||
gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
int lim;
|
||||
/* Search a few source bind stmts at the start of first bb to
|
||||
see if a DEBUG_EXPR_DECL can't be reused. */
|
||||
|
@ -1253,7 +1255,8 @@ rewrite_debug_stmt_uses (gimple stmt)
|
|||
DECL_ARTIFICIAL (def) = 1;
|
||||
TREE_TYPE (def) = TREE_TYPE (var);
|
||||
DECL_MODE (def) = DECL_MODE (var);
|
||||
gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
|
||||
gsi =
|
||||
gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
|
||||
}
|
||||
update = true;
|
||||
|
@ -1868,7 +1871,7 @@ maybe_register_def (def_operand_p def_p, gimple stmt,
|
|||
bind stmts, but there wouldn't be a PC to bind
|
||||
them to either, so avoid diverging the CFG. */
|
||||
if (ef && single_pred_p (ef->dest)
|
||||
&& ef->dest != EXIT_BLOCK_PTR)
|
||||
&& ef->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
/* If there were PHI nodes in the node, we'd
|
||||
have to make sure the value we're binding
|
||||
|
@ -2331,7 +2334,7 @@ rewrite_into_ssa (void)
|
|||
insert_phi_nodes (dfs);
|
||||
|
||||
/* 4- Rename all the blocks. */
|
||||
rewrite_blocks (ENTRY_BLOCK_PTR, REWRITE_ALL);
|
||||
rewrite_blocks (ENTRY_BLOCK_PTR_FOR_FN (cfun), REWRITE_ALL);
|
||||
|
||||
/* Free allocated memory. */
|
||||
FOR_EACH_BB (bb)
|
||||
|
@ -3017,7 +3020,7 @@ insert_updated_phi_nodes_for (tree var, bitmap_head *dfs, bitmap blocks,
|
|||
common dominator of all the definition blocks. */
|
||||
entry = nearest_common_dominator_for_set (CDI_DOMINATORS,
|
||||
db->def_blocks);
|
||||
if (entry != ENTRY_BLOCK_PTR)
|
||||
if (entry != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
EXECUTE_IF_SET_IN_BITMAP (idf, 0, i, bi)
|
||||
if (BASIC_BLOCK (i) != entry
|
||||
&& dominated_by_p (CDI_DOMINATORS, BASIC_BLOCK (i), entry))
|
||||
|
@ -3216,7 +3219,7 @@ update_ssa (unsigned update_flags)
|
|||
be possible to determine the nearest block that had a
|
||||
definition for each of the symbols that are marked for
|
||||
updating. For now this seems more work than it's worth. */
|
||||
start_bb = ENTRY_BLOCK_PTR;
|
||||
start_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
/* Traverse the CFG looking for existing definitions and uses of
|
||||
symbols in SSA operands. Mark interesting blocks and
|
||||
|
@ -3299,7 +3302,7 @@ update_ssa (unsigned update_flags)
|
|||
/* Insertion of PHI nodes may have added blocks to the region.
|
||||
We need to re-compute START_BB to include the newly added
|
||||
blocks. */
|
||||
if (start_bb != ENTRY_BLOCK_PTR)
|
||||
if (start_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
start_bb = nearest_common_dominator_for_set (CDI_DOMINATORS,
|
||||
blocks_to_update);
|
||||
}
|
||||
|
|
|
@ -931,7 +931,8 @@ expand_phi_nodes (struct ssaexpand *sa)
|
|||
elim_graph g = new_elim_graph (sa->map->num_partitions);
|
||||
g->map = sa->map;
|
||||
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
if (!gimple_seq_empty_p (phi_nodes (bb)))
|
||||
{
|
||||
edge e;
|
||||
|
|
|
@ -440,7 +440,8 @@ gimple_gen_ic_func_profiler (void)
|
|||
stmt1: __gcov_indirect_call_profiler_v2 (profile_id,
|
||||
¤t_function_decl)
|
||||
*/
|
||||
gsi = gsi_after_labels (split_edge (single_succ_edge (ENTRY_BLOCK_PTR)));
|
||||
gsi =
|
||||
gsi_after_labels (split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))));
|
||||
|
||||
cur_func = force_gimple_operand_gsi (&gsi,
|
||||
build_addr (current_function_decl,
|
||||
|
|
|
@ -47,7 +47,7 @@ static inline basic_block
|
|||
block_before_loop (loop_p loop)
|
||||
{
|
||||
edge preheader = loop_preheader_edge (loop);
|
||||
return (preheader ? preheader->src : ENTRY_BLOCK_PTR);
|
||||
return (preheader ? preheader->src : ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
}
|
||||
|
||||
/* Analyze all the parameters of the chrec that were left under a
|
||||
|
|
|
@ -3409,7 +3409,7 @@ initialize_parameter_reductions (void)
|
|||
|
||||
seq = gsi_seq (gsi);
|
||||
if (seq)
|
||||
gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
|
||||
gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
|
||||
}
|
||||
|
||||
/* The "main" function of intraprocedural SRA passes. Runs the analysis and if
|
||||
|
@ -3788,7 +3788,7 @@ propagate_dereference_distances (void)
|
|||
basic_block bb;
|
||||
|
||||
queue.create (last_basic_block_for_function (cfun));
|
||||
queue.quick_push (ENTRY_BLOCK_PTR);
|
||||
queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
FOR_EACH_BB (bb)
|
||||
{
|
||||
queue.quick_push (bb);
|
||||
|
@ -3818,7 +3818,7 @@ propagate_dereference_distances (void)
|
|||
{
|
||||
int succ_idx = e->dest->index * func_param_count + i;
|
||||
|
||||
if (e->src == EXIT_BLOCK_PTR)
|
||||
if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
|
||||
if (first)
|
||||
|
@ -3859,10 +3859,11 @@ dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
|
|||
basic_block bb;
|
||||
|
||||
fprintf (dump_file, str);
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
|
||||
FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
|
||||
{
|
||||
fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
|
||||
if (bb != EXIT_BLOCK_PTR)
|
||||
if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
int i;
|
||||
for (i = 0; i < func_param_count; i++)
|
||||
|
@ -3914,7 +3915,7 @@ analyze_caller_dereference_legality (vec<access_p> representatives)
|
|||
for (i = 0; i < func_param_count; i++)
|
||||
{
|
||||
struct access *repr = representatives[i];
|
||||
int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
|
||||
int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
|
||||
|
||||
if (!repr || no_accesses_p (repr))
|
||||
continue;
|
||||
|
@ -4728,9 +4729,9 @@ sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
|
|||
int i, len;
|
||||
gimple_stmt_iterator *gsip = NULL, gsi;
|
||||
|
||||
if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
|
||||
if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
|
||||
{
|
||||
gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
|
||||
gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
gsip = &gsi;
|
||||
}
|
||||
len = adjustments.length ();
|
||||
|
|
|
@ -1824,7 +1824,7 @@ gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
|
|||
while (gsi_end_p (*i))
|
||||
{
|
||||
dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
|
||||
if (dom == NULL || dom == ENTRY_BLOCK_PTR)
|
||||
if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
return;
|
||||
|
||||
*i = gsi_last_bb (dom);
|
||||
|
@ -2314,7 +2314,7 @@ optimize_stack_restore (gimple_stmt_iterator i)
|
|||
case 0:
|
||||
break;
|
||||
case 1:
|
||||
if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
|
||||
if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
return NULL_TREE;
|
||||
break;
|
||||
default:
|
||||
|
|
|
@ -1078,7 +1078,7 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
|
|||
v2 = SSA_NAME_VERSION (var);
|
||||
bitmap_set_bit (used_in_copy, v1);
|
||||
bitmap_set_bit (used_in_copy, v2);
|
||||
cost = coalesce_cost_bb (EXIT_BLOCK_PTR);
|
||||
cost = coalesce_cost_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
add_coalesce (cl, v1, v2, cost);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -328,9 +328,9 @@ mark_control_dependent_edges_necessary (basic_block bb, bool ignore_self)
|
|||
unsigned edge_number;
|
||||
bool skipped = false;
|
||||
|
||||
gcc_assert (bb != EXIT_BLOCK_PTR);
|
||||
gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
if (bb == ENTRY_BLOCK_PTR)
|
||||
if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
return;
|
||||
|
||||
EXECUTE_IF_SET_IN_BITMAP (cd->get_edges_dependent_on (bb->index),
|
||||
|
@ -636,7 +636,7 @@ propagate_necessity (bool aggressive)
|
|||
containing STMT is control dependent, but only if we haven't
|
||||
already done so. */
|
||||
basic_block bb = gimple_bb (stmt);
|
||||
if (bb != ENTRY_BLOCK_PTR
|
||||
if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& !bitmap_bit_p (visited_control_parents, bb->index))
|
||||
mark_control_dependent_edges_necessary (bb, false);
|
||||
}
|
||||
|
@ -742,7 +742,7 @@ propagate_necessity (bool aggressive)
|
|||
if (!bitmap_bit_p (last_stmt_necessary, arg_bb->index))
|
||||
mark_last_stmt_necessary (arg_bb);
|
||||
}
|
||||
else if (arg_bb != ENTRY_BLOCK_PTR
|
||||
else if (arg_bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& !bitmap_bit_p (visited_control_parents,
|
||||
arg_bb->index))
|
||||
mark_control_dependent_edges_necessary (arg_bb, true);
|
||||
|
@ -1076,7 +1076,7 @@ remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
|
|||
fake edges in the dominator tree. */
|
||||
if (e)
|
||||
;
|
||||
else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
|
||||
else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
e = EDGE_SUCC (bb, 0);
|
||||
else
|
||||
e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
|
||||
|
@ -1168,7 +1168,8 @@ eliminate_unnecessary_stmts (void)
|
|||
|
||||
as desired. */
|
||||
gcc_assert (dom_info_available_p (CDI_DOMINATORS));
|
||||
h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));
|
||||
h = get_all_dominated_blocks (CDI_DOMINATORS,
|
||||
single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
|
||||
|
||||
while (h.length ())
|
||||
{
|
||||
|
@ -1265,7 +1266,8 @@ eliminate_unnecessary_stmts (void)
|
|||
find_unreachable_blocks ();
|
||||
|
||||
/* Delete all unreachable basic blocks in reverse dominator order. */
|
||||
for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
|
||||
for (bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
|
||||
bb != ENTRY_BLOCK_PTR_FOR_FN (cfun); bb = prev_bb)
|
||||
{
|
||||
prev_bb = bb->prev_bb;
|
||||
|
||||
|
|
|
@ -902,7 +902,7 @@ tree_ssa_dominator_optimize (void)
|
|||
while (single_succ_p (bb)
|
||||
&& (single_succ_edge (bb)->flags & EDGE_EH) == 0)
|
||||
bb = single_succ (bb);
|
||||
if (bb == EXIT_BLOCK_PTR)
|
||||
if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
if ((unsigned) bb->index != i)
|
||||
bitmap_set_bit (need_eh_cleanup, bb->index);
|
||||
|
@ -3054,7 +3054,8 @@ eliminate_degenerate_phis (void)
|
|||
phase in dominator order. Presumably this is because walking
|
||||
in dominator order leaves fewer PHIs for later examination
|
||||
by the worklist phase. */
|
||||
eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR, interesting_names);
|
||||
eliminate_degenerate_phis_1 (ENTRY_BLOCK_PTR_FOR_FN (cfun),
|
||||
interesting_names);
|
||||
|
||||
/* Second phase. Eliminate second order degenerate PHIs as well
|
||||
as trivial copies or constant initializations identified by
|
||||
|
|
|
@ -1009,7 +1009,7 @@ loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited,
|
|||
FOR_EACH_EDGE (e, ei, bb->preds)
|
||||
{
|
||||
pred_bb = e->src;
|
||||
if (pred_bb == ENTRY_BLOCK_PTR)
|
||||
if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
/* TMP is variables live-on-entry from BB that aren't defined in the
|
||||
predecessor block. This should be the live on entry vars to pred.
|
||||
|
@ -1087,7 +1087,7 @@ set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
|
|||
bitmap_set_bit (&live->liveout[def_bb->index], p);
|
||||
}
|
||||
else
|
||||
def_bb = ENTRY_BLOCK_PTR;
|
||||
def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
|
||||
/* Visit each use of SSA_NAME and if it isn't in the same block as the def,
|
||||
add it to the list of live on entry blocks. */
|
||||
|
@ -1103,7 +1103,7 @@ set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
|
|||
defined in that block, or whether its live on entry. */
|
||||
int index = PHI_ARG_INDEX_FROM_USE (use);
|
||||
edge e = gimple_phi_arg_edge (use_stmt, index);
|
||||
if (e->src != ENTRY_BLOCK_PTR)
|
||||
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
{
|
||||
if (e->src != def_bb)
|
||||
add_block = e->src;
|
||||
|
@ -1169,14 +1169,14 @@ calculate_live_on_exit (tree_live_info_p liveinfo)
|
|||
if (p == NO_PARTITION)
|
||||
continue;
|
||||
e = gimple_phi_arg_edge (phi, i);
|
||||
if (e->src != ENTRY_BLOCK_PTR)
|
||||
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
|
||||
}
|
||||
}
|
||||
|
||||
/* Add each successors live on entry to this bock live on exit. */
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR)
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
bitmap_ior_into (&liveinfo->liveout[bb->index],
|
||||
live_on_entry (liveinfo, e->dest));
|
||||
}
|
||||
|
@ -1369,12 +1369,12 @@ verify_live_on_entry (tree_live_info_p live)
|
|||
/* Check for live on entry partitions and report those with a DEF in
|
||||
the program. This will typically mean an optimization has done
|
||||
something wrong. */
|
||||
bb = ENTRY_BLOCK_PTR;
|
||||
bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
|
||||
num = 0;
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
{
|
||||
int entry_block = e->dest->index;
|
||||
if (e->dest == EXIT_BLOCK_PTR)
|
||||
if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
|
||||
continue;
|
||||
for (i = 0; i < (unsigned)num_var_partitions (map); i++)
|
||||
{
|
||||
|
|
|
@ -273,8 +273,8 @@ static inline bitmap
|
|||
live_on_entry (tree_live_info_p live, basic_block bb)
|
||||
{
|
||||
gcc_checking_assert (live->livein
|
||||
&& bb != ENTRY_BLOCK_PTR
|
||||
&& bb != EXIT_BLOCK_PTR);
|
||||
&& bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
return &live->livein[bb->index];
|
||||
}
|
||||
|
@ -287,8 +287,8 @@ static inline bitmap
|
|||
live_on_exit (tree_live_info_p live, basic_block bb)
|
||||
{
|
||||
gcc_checking_assert (live->liveout
|
||||
&& bb != ENTRY_BLOCK_PTR
|
||||
&& bb != EXIT_BLOCK_PTR);
|
||||
&& bb != ENTRY_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
return &live->liveout[bb->index];
|
||||
}
|
||||
|
|
|
@ -2007,7 +2007,7 @@ find_interesting_uses (struct ivopts_data *data)
|
|||
bb = body[i];
|
||||
|
||||
FOR_EACH_EDGE (e, ei, bb->succs)
|
||||
if (e->dest != EXIT_BLOCK_PTR
|
||||
if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
|
||||
&& !flow_bb_inside_loop_p (data->current_loop, e->dest))
|
||||
find_interesting_uses_outside (data, e);
|
||||
|
||||
|
|
|
@ -231,7 +231,7 @@ compute_live_loop_exits (bitmap live_exits, bitmap use_blocks,
|
|||
bool pred_visited;
|
||||
|
||||
/* We should have met DEF_BB along the way. */
|
||||
gcc_assert (pred != ENTRY_BLOCK_PTR);
|
||||
gcc_assert (pred != ENTRY_BLOCK_PTR_FOR_FN (cfun));
|
||||
|
||||
if (pred_loop_depth >= def_loop_depth)
|
||||
{
|
||||
|
|
|
@ -496,7 +496,7 @@ bound_difference (struct loop *loop, tree x, tree y, bounds *bnds)
|
|||
/* Now walk the dominators of the loop header and use the entry
|
||||
guards to refine the estimates. */
|
||||
for (bb = loop->header;
|
||||
bb != ENTRY_BLOCK_PTR && cnt < MAX_DOMINATORS_TO_WALK;
|
||||
bb != ENTRY_BLOCK_PTR_FOR_FN (cfun) && cnt < MAX_DOMINATORS_TO_WALK;
|
||||
bb = get_immediate_dominator (CDI_DOMINATORS, bb))
|
||||
{
|
||||
if (!single_pred_p (bb))
|
||||
|
@ -1781,7 +1781,7 @@ simplify_using_initial_conditions (struct loop *loop, tree expr)
|
|||
the number of BBs times the number of loops in degenerate
|
||||
cases. */
|
||||
for (bb = loop->header;
|
||||
bb != ENTRY_BLOCK_PTR && cnt < MAX_DOMINATORS_TO_WALK;
|
||||
bb != ENTRY_BLOCK_PTR_FOR_FN (cfun) && cnt < MAX_DOMINATORS_TO_WALK;
|
||||
bb = get_immediate_dominator (CDI_DOMINATORS, bb))
|
||||
{
|
||||
if (!single_pred_p (bb))
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue