alias.c (nonlocal_mentioned_p, [...]): Use LABEL_P, JUMP_P, CALL_P, NONJUMP_INSN_P, INSN_P, NOTE_P, BARRIER_P.

2004-07-08  Jerry Quinn  <jlquinn@optonline.net>

	* alias.c (nonlocal_mentioned_p, nonlocal_referenced_p,
	nonlocal_set_p, init_alias_analysis): Use LABEL_P, JUMP_P, CALL_P,
	NONJUMP_INSN_P, INSN_P, NOTE_P, BARRIER_P.
	* bb-reorder.c (mark_bb_for_unlikely_executed_section,
	add_labels_and_missing_jumps, find_jump_block,
	fix_crossing_unconditional_branches, add_reg_crossing_jump_notes):
	Likewise.
	* bt-load.c (btr_referenced_p, compute_defs_uses_and_gen,
	link_btr_uses, move_btr_def): Likewise.
	* builtins.c (expand_builtin_longjmp, expand_builtin_nonlocal_goto,
	expand_builtin_expect_jump): Likewise.
	* caller-save.c (save_call_clobbered_regs, insert_one_insn): Likewise.
	* calls.c (expand_call, emit_library_call_value_1): Likewise.
	* cfganal.c (forwarder_block_p): Likewise.
	* cfgbuild.c (inside_basic_block_p, count_basic_blocks,
	make_label_edge, rtl_make_eh_edge, make_edges, find_basic_blocks_1,
	find_bb_boundaries): Likewise.
	* cfgcleanup.c (try_simplify_condjump, try_forward_edges,
	merge_blocks_move_predecessor_nojumps,
	merge_blocks_move_successor_nojumps, insns_match_p,
	flow_find_cross_jump, outgoing_edges_match, try_crossjump_to_edge,
	try_optimize_cfg): Likewise.
	* cfgexpand.c (expand_block, construct_exit_block): Likewise.
	* cfglayout.c (skip_insns_after_block, label_for_bb,
	record_effective_endpoints, insn_locators_initialize,
	fixup_reorder_chain, update_unlikely_executed_notes): Likewise.
	* cfgloopmanip.c (create_loop_notes): Likewise.
	* cfgrtl.c (delete_insn, delete_insn_chain,
	create_basic_block_structure, rtl_delete_block, free_bb_for_insn,
	update_bb_for_insn, rtl_merge_blocks, rtl_can_merge_blocks,
	block_label, try_redirect_by_replacing_jump, last_loop_beg_note,
	redirect_branch_edge, force_nonfallthru_and_redirect,
	rtl_tidy_fallthru_edge, back_edge_of_syntactic_loop_p,
	rtl_split_edge, commit_one_edge_insertion, print_rtl_with_bb,
	update_br_prob_note, rtl_verify_flow_info_1, rtl_verify_flow_info,
	purge_dead_edges, cfg_layout_redirect_edge_and_branch,
	cfg_layout_delete_block, cfg_layout_can_merge_blocks_p,
	cfg_layout_merge_blocks, rtl_block_ends_with_call_p,
	need_fake_edge_p, rtl_flow_call_edges_add): Likewise.
	* combine.c (combine_instructions, can_combine_p, try_combine,
	find_split_point, record_dead_and_set_regs, reg_dead_at_p,
	distribute_notes, distribute_links, insn_cuid): Likewise.
	* cse.c (fold_rtx, cse_insn, cse_around_loop,
	invalidate_skipped_block, cse_set_around_loop,
	cse_end_of_basic_block, cse_main, cse_basic_block,
	cse_condition_code_reg): Likewise.
	* cselib.c (cselib_process_insn): Likewise.
	* ddg.c (create_ddg): Likewise.
	* df.c (df_insn_refs_record, df_bb_rd_local_compute, df_insns_modify):
	Likewise.
	* dwarf2out.c (dwarf2out_stack_adjust, dwarf2out_frame_debug,
	gen_label_die, dwarf2out_var_location): Likewise.
	* emit-rtl.c (get_first_nonnote_insn, get_last_nonnote_insn,
	next_insn, previous_insn, next_nonnote_insn, prev_nonnote_insn,
	last_call_insn, active_insn_p, next_label, prev_label,
	link_cc0_insns, next_cc0_user, try_split, add_insn_after,
	add_insn_before, remove_insn, add_function_usage_to,
	reorder_insns, find_line_note, remove_unnecessary_notes,
	emit_insn_after_1, classify_insn): Likewise.
	* except.c (convert_from_eh_region_ranges_1, emit_to_new_bb_before,
	connect_post_landing_pads, sjlj_mark_call_sites,
	sjlj_emit_function_enter, sjlj_emit_function_exit, reachable_handlers,
	can_throw_internal, can_throw_external, set_nothrow_function_flags,
	convert_to_eh_region_ranges): Likewise.
	* explow.c (optimize_save_area_alloca): Likewise.
	* expr.c (expand_expr_real): Likewise.
	* final.c (insn_current_reference_address, compute_alignments,
	shorten_branches, final, scan_ahead_for_unlikely_executed_note,
	final_scan_insn, output_asm_label, leaf_function_p): Likewise.
	* flow.c (first_insn_after_basic_block_note, delete_dead_jumptables,
	propagate_block_delete_insn, propagate_one_insn,
	init_propagate_block_info, propagate_block, libcall_dead_p,
	mark_set_1, attempt_auto_inc, find_auto_inc, try_pre_increment):
	Likewise.
	* function.c (instantiate_virtual_regs,	reorder_blocks_1,
	expand_function_start, expand_function_end, contains,
	thread_prologue_and_epilogue_insns,
	reposition_prologue_and_epilogue_notes): Likewise.
	* gcse.c (constprop_register, bypass_conditional_jumps,
	insert_insn_end_bb, gcse_after_reload): Likewise.
	* genemit.c (gen_expand, gen_split): Likewise.
	* genpeep.c (gen_peephole, main): Likewise.
	* global.c (build_insn_chain): Likewise.
	* graph.c (node_data, print_rtl_graph_with_bb): Likewise.
	* haifa-sched.c (unlink_other_notes, unlink_line_notes,
	get_block_head_tail, no_real_insns_p, rm_line_notes, save_line_notes,
	restore_line_notes, rm_redundant_line_notes, rm_other_notes,
	ok_for_early_queue_removal, set_priorities, sched_init): Likewise.
	* ifcvt.c (count_bb_insns, first_active_insn, last_active_insn,
	cond_exec_process_insns, end_ifcvt_sequence, noce_process_if_block,
	merge_if_block, block_jumps_and_fallthru_p, find_if_block,
	dead_or_predicable): Likewise.
	* integrate.c (try_constants): Likewise.
	* jump.c (rebuild_jump_labels, cleanup_barriers,
	purge_line_number_notes, init_label_info, mark_all_labels,
	squeeze_notes, get_label_before, get_label_after,
	reversed_comparison_code_parts, simplejump_p, pc_set,
	returnjump_p, onlyjump_p, follow_jumps, mark_jump_label,
	delete_barrier, delete_prior_computation, delete_computation,
	delete_related_insns, delete_for_peephole, redirect_jump):
	Likewise.
	* lcm.c (optimize_mode_switching): Likewise.
	* local-alloc.c (validate_equiv_mem, update_equiv_regs, block_alloc):
	Likewise.
	* loop-doloop.c (doloop_valid_p, doloop_optimize): Likewise.
	* loop-invariant.c (find_exits, find_invariants_bb): Likewise.
	* loop-iv.c (simplify_using_assignment): Likewise.
	* loop.c (compute_luids, loop_optimize, scan_loop, libcall_other_reg,
	libcall_benefit, skip_consec_insns, move_movables, prescan_loop,
	find_and_verify_loops, labels_in_range_p, for_each_insn_in_loop,
	loop_bivs_init_find, strength_reduce, check_insn_for_bivs,
	check_insn_for_givs, check_final_value, update_giv_derive,
	basic_induction_var, product_cheap_p, check_dbra_loop,
	loop_insn_first_p, last_use_this_basic_block,
	canonicalize_condition, get_condition, loop_regs_scan, load_mems,
	try_copy_prop, LOOP_BLOCK_NUM, loop_dump_aux): Likewise.
	* modulo-sched.c (doloop_register_get, find_line_note, sms_schedule,
	sms_schedule_by_order): Likewise.
	* optabs.c (emit_no_conflict_block, emit_libcall_block): Likewise.
	* postreload.c (reload_cse_simplify_operands, reload_combine,
	reload_cse_move2add): Likewise.
	* predict.c (can_predict_insn_p, estimate_probability,
	expected_value_to_br_prob, process_note_predictions): Likewise.
	* print-rtl.c (print_rtx, print_rtl, print_rtl_single): Likewise.
	* profile.c (branch_prob): Likewise.
	* ra-build.c (live_out_1, livethrough_conflicts_bb,
	detect_webs_set_in_cond_jump): Likewise.
	* ra-debug.c (ra_print_rtx_object, ra_debug_insns,
	ra_print_rtl_with_bb): Likewise.
	* ra-rewrite.c (insert_stores, rewrite_program2): Likewise.
	* recog.c (next_insn_tests_no_inequality, find_single_use,
	split_all_insns, peephole2_optimize, if_test_bypass_p): Likewise.
	* reg-stack.c (next_flags_user, record_label_references,
	emit_swap_insn, swap_rtx_condition, subst_stack_regs,
	compensate_edge, convert_regs_1): Likewise.
	* regclass.c (scan_one_insn): Likewise.
	* regmove.c (optimize_reg_copy_1, optimize_reg_copy_2, fixup_match_2,
	regmove_optimize, fixup_match_1, single_set_for_csa,
	combine_stack_adjustments_for_block): Likewise.
	* regrename.c (build_def_use, copyprop_hardreg_forward_1): Likewise.
	* reload.c (find_reloads, find_reloads_address_1, subst_reloads,
	find_equiv_reg): Likewise.
	* reload1.c (reload, calculate_needs_all_insns, set_label_offsets,
	reload_as_needed, emit_input_reload_insns, do_output_reload,
	delete_output_reload, delete_address_reloads_1, fixup_abnormal_edges):
	Likewise.
	* reorg.c (find_end_label, emit_delay_sequence,
	delete_from_delay_slot, delete_scheduled_jump, optimize_skip,
	get_jump_flags, rare_destination, mostly_true_jump,
	try_merge_delay_insns, redundant_insn, own_thread_p,
	fill_simple_delay_slots, fill_slots_from_thread,
	fill_eager_delay_slots, relax_delay_slots, make_return_insns,
	dbr_schedule): Likewise.
	* resource.c (find_basic_block, next_insn_no_annul,
	find_dead_or_set_registers, mark_target_live_regs): Likewise.
	* rtl.h (RTX_PREV): Likewise.
	* rtlanal.c (global_reg_mentioned_p, no_labels_between_p,
	no_jumps_between_p, reg_used_between_p, reg_referenced_between_p,
	reg_set_p, find_last_value, dead_or_set_regno_p, find_reg_fusage,
	find_regno_fusage, pure_call_p, replace_label, rtx_referenced_p_1,
	tablejump_p, computed_jump_p, insns_safe_to_move_p,
	find_first_parameter_load, can_hoist_insn_p): Likewise.
	* sched-deps.c (get_condition, add_dependence, sched_analyze_2,
	sched_analyze_insn, sched_analyze, add_forward_dependence): Likewise.
	* sched-ebb.c (fix_basic_block_boundaries, add_deps_for_risky_insns,
	schedule_ebbs): Likewise.
	* sched-rgn.c (is_cfg_nonregular, find_conditional_protection,
	is_conditionally_protected, can_schedule_ready_p,
	add_branch_dependences, debug_dependencies): Likewise.
	* stmt.c (emit_nop, expand_start_case, emit_jump_if_reachable):
	Likewise.
	* unroll.c (unroll_loop, copy_loop_body, back_branch_in_range_p,
	reg_dead_after_loop, loop_find_equiv_value, loop_iterations,
	set_dominates_use, ujump_to_loop_cont): Likewise.
	* var-tracking.c (prologue_stack_adjust, vt_initialize): Likewise.
	* varasm.c (output_constant_pool_1): Likewise.

From-SVN: r84341
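
For reference, this is a mechanical conversion: open-coded tests such as
GET_CODE (insn) == CALL_INSN become the corresponding predicate macro,
e.g. CALL_P (insn), and negated tests such as GET_CODE (insn) != NOTE
become !NOTE_P (insn).  A sketch of the predicate macros as defined in
rtl.h of this era (comments paraphrased; see rtl.h for the exact text):

    /* Nonzero iff X is a label insn.  */
    #define LABEL_P(X) (GET_CODE (X) == CODE_LABEL)

    /* Nonzero iff X is a jump insn.  */
    #define JUMP_P(X) (GET_CODE (X) == JUMP_INSN)

    /* Nonzero iff X is a call insn.  */
    #define CALL_P(X) (GET_CODE (X) == CALL_INSN)

    /* Nonzero iff X is an insn that is neither a jump nor a call.  */
    #define NONJUMP_INSN_P(X) (GET_CODE (X) == INSN)

    /* Nonzero iff X is a real insn: nonjump insn, jump, or call.  */
    #define INSN_P(X) (NONJUMP_INSN_P (X) || JUMP_P (X) || CALL_P (X))

    /* Nonzero iff X is a note.  */
    #define NOTE_P(X) (GET_CODE (X) == NOTE)

    /* Nonzero iff X is a barrier.  */
    #define BARRIER_P(X) (GET_CODE (X) == BARRIER)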

alias.c

@ -2495,7 +2495,7 @@ nonlocal_mentioned_p (rtx x)
{
if (INSN_P (x))
{
if (GET_CODE (x) == CALL_INSN)
if (CALL_P (x))
{
if (! CONST_OR_PURE_CALL_P (x))
return 1;
@ -2589,7 +2589,7 @@ nonlocal_referenced_p (rtx x)
{
if (INSN_P (x))
{
if (GET_CODE (x) == CALL_INSN)
if (CALL_P (x))
{
if (! CONST_OR_PURE_CALL_P (x))
return 1;
@ -2665,7 +2665,7 @@ nonlocal_set_p (rtx x)
{
if (INSN_P (x))
{
if (GET_CODE (x) == CALL_INSN)
if (CALL_P (x))
{
if (! CONST_OR_PURE_CALL_P (x))
return 1;
@ -2953,7 +2953,7 @@ init_alias_analysis (void)
}
}
}
else if (GET_CODE (insn) == NOTE
else if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
copying_arguments = false;
}

bb-reorder.c

@ -1306,8 +1306,8 @@ mark_bb_for_unlikely_executed_section (basic_block bb)
for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
cur_insn = NEXT_INSN (cur_insn))
if (GET_CODE (cur_insn) != NOTE
&& GET_CODE (cur_insn) != CODE_LABEL)
if (!NOTE_P (cur_insn)
&& !LABEL_P (cur_insn))
{
insert_insn = cur_insn;
break;
@ -1360,7 +1360,7 @@ add_labels_and_missing_jumps (edge *crossing_edges, int n_crossing_edges)
if (src && (src != ENTRY_BLOCK_PTR))
{
if (GET_CODE (BB_END (src)) != JUMP_INSN)
if (!JUMP_P (BB_END (src)))
/* bb just falls through. */
{
/* make sure there's only one successor */
@ -1556,14 +1556,14 @@ find_jump_block (basic_block jump_dest)
only one executable instruction, which is an unconditional jump.
If so, we can use it. */
if (GET_CODE (BB_HEAD (src)) == CODE_LABEL)
if (LABEL_P (BB_HEAD (src)))
for (insn = BB_HEAD (src);
!INSN_P (insn) && insn != NEXT_INSN (BB_END (src));
insn = NEXT_INSN (insn))
{
if (INSN_P (insn)
&& insn == BB_END (src)
&& GET_CODE (insn) == JUMP_INSN
&& JUMP_P (insn)
&& !any_condjump_p (insn))
{
source_bb = src;
@ -1763,7 +1763,7 @@ fix_crossing_unconditional_branches (void)
/* Check to see if bb ends in a crossing (unconditional) jump. At
this point, no crossing jumps should be conditional. */
if (GET_CODE (last_insn) == JUMP_INSN
if (JUMP_P (last_insn)
&& succ->crossing_edge)
{
rtx label2, table;
@ -1803,7 +1803,7 @@ fix_crossing_unconditional_branches (void)
cur_insn = NEXT_INSN (cur_insn))
{
BLOCK_FOR_INSN (cur_insn) = cur_bb;
if (GET_CODE (cur_insn) == JUMP_INSN)
if (JUMP_P (cur_insn))
jump_insn = cur_insn;
}
@ -1833,7 +1833,7 @@ add_reg_crossing_jump_notes (void)
FOR_EACH_BB (bb)
for (e = bb->succ; e; e = e->succ_next)
if (e->crossing_edge
&& GET_CODE (BB_END (e->src)) == JUMP_INSN)
&& JUMP_P (BB_END (e->src)))
REG_NOTES (BB_END (e->src)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
NULL_RTX,
REG_NOTES (BB_END

bt-load.c

@ -225,7 +225,7 @@ insn_sets_btr_p (rtx insn, int check_const, int *regno)
{
rtx set;
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& (set = single_set (insn)))
{
rtx dest = SET_DEST (set);
@ -533,7 +533,7 @@ compute_defs_uses_and_gen (fibheap_t all_btr_defs, btr_def *def_array,
user->next = info.users_this_bb;
info.users_this_bb = user;
}
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
HARD_REG_SET *clobbered = &call_used_reg_set;
HARD_REG_SET call_saved;
@ -580,7 +580,7 @@ compute_defs_uses_and_gen (fibheap_t all_btr_defs, btr_def *def_array,
IOR_HARD_REG_SET (btrs_live_at_end[i], tmp);
can_throw = 1;
}
if (can_throw || GET_CODE (insn) == JUMP_INSN)
if (can_throw || JUMP_P (insn))
{
int regno;
@ -740,7 +740,7 @@ link_btr_uses (btr_def *def_array, btr_user *use_array, sbitmap *bb_out,
sbitmap_free (reaching_defs_of_reg);
}
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
int regno;
@ -1135,7 +1135,7 @@ move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
combine_btr_defs (def, btrs_live_in_range);
btr = def->btr;
add_btr_to_live_range (def);
if (GET_CODE (insp) == CODE_LABEL)
if (LABEL_P (insp))
insp = NEXT_INSN (insp);
/* N.B.: insp is expected to be NOTE_INSN_BASIC_BLOCK now. Some
optimizations can result in insp being both first and last insn of
@ -1148,7 +1148,7 @@ move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
for (insp = BB_END (b); ! INSN_P (insp); insp = PREV_INSN (insp))
if (insp == BB_HEAD (b))
abort ();
if (GET_CODE (insp) == JUMP_INSN || can_throw_internal (insp))
if (JUMP_P (insp) || can_throw_internal (insp))
insp = PREV_INSN (insp);
}

builtins.c

@ -766,13 +766,13 @@ expand_builtin_longjmp (rtx buf_addr, rtx value)
{
if (insn == last)
abort ();
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
{
REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
REG_NOTES (insn));
break;
}
else if (GET_CODE (insn) == CALL_INSN)
else if (CALL_P (insn))
break;
}
}
@ -837,13 +837,13 @@ expand_builtin_nonlocal_goto (tree arglist)
non-local goto. */
for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
{
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
{
REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
const0_rtx, REG_NOTES (insn));
break;
}
else if (GET_CODE (insn) == CALL_INSN)
else if (CALL_P (insn))
break;
}
@ -4981,9 +4981,9 @@ expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
ret = get_insns ();
drop_through_label = get_last_insn ();
if (drop_through_label && GET_CODE (drop_through_label) == NOTE)
if (drop_through_label && NOTE_P (drop_through_label))
drop_through_label = prev_nonnote_insn (drop_through_label);
if (drop_through_label && GET_CODE (drop_through_label) != CODE_LABEL)
if (drop_through_label && !LABEL_P (drop_through_label))
drop_through_label = NULL_RTX;
end_sequence ();
@ -4998,7 +4998,7 @@ expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
rtx next = NEXT_INSN (insn);
if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
if (JUMP_P (insn) && any_condjump_p (insn))
{
rtx ifelse = SET_SRC (pc_set (insn));
rtx then_dest = XEXP (ifelse, 1);
@ -5021,10 +5021,10 @@ expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
/* Otherwise check where we drop through. */
else if (else_dest == pc_rtx)
{
if (next && GET_CODE (next) == NOTE)
if (next && NOTE_P (next))
next = next_nonnote_insn (next);
if (next && GET_CODE (next) == JUMP_INSN
if (next && JUMP_P (next)
&& any_uncondjump_p (next))
temp = XEXP (SET_SRC (pc_set (next)), 0);
else
@ -5039,10 +5039,10 @@ expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
}
else if (then_dest == pc_rtx)
{
if (next && GET_CODE (next) == NOTE)
if (next && NOTE_P (next))
next = next_nonnote_insn (next);
if (next && GET_CODE (next) == JUMP_INSN
if (next && JUMP_P (next)
&& any_uncondjump_p (next))
temp = XEXP (SET_SRC (pc_set (next)), 0);
else

caller-save.c

@ -484,7 +484,7 @@ save_call_clobbered_regs (void)
if (n_regs_saved)
for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
if (TEST_HARD_REG_BIT (hard_regs_saved, regno))
regno += insert_restore (chain, GET_CODE (insn) == JUMP_INSN,
regno += insert_restore (chain, JUMP_P (insn),
regno, MOVE_MAX_WORDS, save_mode);
}
}
@ -793,7 +793,7 @@ insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)
isn't a problem. We do, however, assume here that CALL_INSNs don't
reference CC0. Guard against non-INSN's like CODE_LABEL. */
if ((GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
if ((NONJUMP_INSN_P (insn) || JUMP_P (insn))
&& before_p
&& reg_referenced_p (cc0_rtx, PATTERN (insn)))
chain = chain->prev, insn = chain->insn;

calls.c

@ -2838,7 +2838,7 @@ expand_call (tree exp, rtx target, int ignore)
/* Expansion of block moves possibly introduced a loop that may
not appear inside libcall block. */
for (insn = insns; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
failed = true;
if (failed)
@ -2923,7 +2923,7 @@ expand_call (tree exp, rtx target, int ignore)
than just a CALL_INSN above, so we must search for it here. */
rtx last = get_last_insn ();
while (GET_CODE (last) != CALL_INSN)
while (!CALL_P (last))
{
last = PREV_INSN (last);
/* There was no CALL_INSN? */
@ -3929,7 +3929,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
just a CALL_INSN above, so we must search for it here. */
rtx last = get_last_insn ();
while (GET_CODE (last) != CALL_INSN)
while (!CALL_P (last))
{
last = PREV_INSN (last);
/* There was no CALL_INSN? */

cfganal.c

@ -93,7 +93,7 @@ forwarder_block_p (basic_block bb)
return false;
return (!INSN_P (insn)
|| (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
|| (JUMP_P (insn) && simplejump_p (insn))
|| !flow_active_insn_p (insn));
}

cfgbuild.c

@ -66,7 +66,7 @@ inside_basic_block_p (rtx insn)
case CODE_LABEL:
/* Avoid creating of basic block for jumptables. */
return (NEXT_INSN (insn) == 0
|| GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
|| !JUMP_P (NEXT_INSN (insn))
|| (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
&& GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));
@ -148,7 +148,7 @@ count_basic_blocks (rtx f)
{
/* Code labels and barriers causes current basic block to be
terminated at previous real insn. */
if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
if ((LABEL_P (insn) || BARRIER_P (insn))
&& saw_insn)
count++, saw_insn = false;
@ -183,7 +183,7 @@ count_basic_blocks (rtx f)
static void
make_label_edge (sbitmap *edge_cache, basic_block src, rtx label, int flags)
{
if (GET_CODE (label) != CODE_LABEL)
if (!LABEL_P (label))
abort ();
/* If the label was never emitted, this insn is junk, but avoid a
@ -202,7 +202,7 @@ make_label_edge (sbitmap *edge_cache, basic_block src, rtx label, int flags)
void
rtl_make_eh_edge (sbitmap *edge_cache, basic_block src, rtx insn)
{
int is_call = GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0;
int is_call = CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0;
rtx handlers, i;
handlers = reachable_handlers (insn);
@ -269,7 +269,7 @@ make_edges (basic_block min, basic_block max, int update_p)
int force_fallthru = 0;
edge e;
if (GET_CODE (BB_HEAD (bb)) == CODE_LABEL
if (LABEL_P (BB_HEAD (bb))
&& LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
@ -394,7 +394,7 @@ make_edges (basic_block min, basic_block max, int update_p)
break;
}
while (insn
&& GET_CODE (insn) == NOTE
&& NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
insn = NEXT_INSN (insn);
@ -437,7 +437,7 @@ find_basic_blocks_1 (rtx f)
next = NEXT_INSN (insn);
if ((GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == BARRIER)
if ((LABEL_P (insn) || BARRIER_P (insn))
&& head)
{
prev = create_basic_block_structure (head, end, bb_note, prev);
@ -579,7 +579,7 @@ find_bb_boundaries (basic_block bb)
if (insn == BB_END (bb))
return;
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
insn = NEXT_INSN (insn);
/* Scan insn chain and try to find new basic block boundaries. */

cfgcleanup.c

@ -194,7 +194,7 @@ try_simplify_condjump (basic_block cbranch_block)
insn = next)
{
next = NEXT_INSN (insn);
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
if (insn == BB_END (jump_block))
{
@ -535,16 +535,16 @@ try_forward_edges (int mode, basic_block b)
rtx insn = (target->succ->flags & EDGE_FALLTHRU
? BB_HEAD (target) : prev_nonnote_insn (BB_END (target)));
if (GET_CODE (insn) != NOTE)
if (!NOTE_P (insn))
insn = NEXT_INSN (insn);
for (; insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
for (; insn && !LABEL_P (insn) && !INSN_P (insn);
insn = NEXT_INSN (insn))
if (GET_CODE (insn) == NOTE
if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
break;
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
break;
/* Do not clean up branches to just past the end of a loop
@ -552,7 +552,7 @@ try_forward_edges (int mode, basic_block b)
recognition of some patterns. */
insn = PREV_INSN (BB_HEAD (target));
if (insn && GET_CODE (insn) == NOTE
if (insn && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
break;
}
@ -690,7 +690,7 @@ merge_blocks_move_predecessor_nojumps (basic_block a, basic_block b)
return;
barrier = next_nonnote_insn (BB_END (a));
if (GET_CODE (barrier) != BARRIER)
if (!BARRIER_P (barrier))
abort ();
delete_insn (barrier);
@ -753,7 +753,7 @@ merge_blocks_move_successor_nojumps (basic_block a, basic_block b)
/* There had better have been a barrier there. Delete it. */
barrier = NEXT_INSN (BB_END (b));
if (barrier && GET_CODE (barrier) == BARRIER)
if (barrier && BARRIER_P (barrier))
delete_insn (barrier);
/* Move block and loop notes out of the chain so that we do not
@ -1000,7 +1000,7 @@ insns_match_p (int mode ATTRIBUTE_UNUSED, rtx i1, rtx i2)
??? We take the simple route for now and assume that if they're
equal, they were constructed identically. */
if (GET_CODE (i1) == CALL_INSN
if (CALL_P (i1)
&& (!rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
CALL_INSN_FUNCTION_USAGE (i2))
|| SIBLING_CALL_P (i1) != SIBLING_CALL_P (i2)))
@ -1178,13 +1178,13 @@ flow_find_cross_jump (int mode ATTRIBUTE_UNUSED, basic_block bb1,
while (last1 != BB_HEAD (bb1) && !INSN_P (PREV_INSN (last1)))
last1 = PREV_INSN (last1);
if (last1 != BB_HEAD (bb1) && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
if (last1 != BB_HEAD (bb1) && LABEL_P (PREV_INSN (last1)))
last1 = PREV_INSN (last1);
while (last2 != BB_HEAD (bb2) && !INSN_P (PREV_INSN (last2)))
last2 = PREV_INSN (last2);
if (last2 != BB_HEAD (bb2) && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
if (last2 != BB_HEAD (bb2) && LABEL_P (PREV_INSN (last2)))
last2 = PREV_INSN (last2);
*f1 = last1;
@ -1211,10 +1211,10 @@ outgoing_edges_match (int mode, basic_block bb1, basic_block bb2)
unconditional jump, or a fake edge to exit. */
if (bb1->succ && !bb1->succ->succ_next
&& (bb1->succ->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0
&& (GET_CODE (BB_END (bb1)) != JUMP_INSN || simplejump_p (BB_END (bb1))))
&& (!JUMP_P (BB_END (bb1)) || simplejump_p (BB_END (bb1))))
return (bb2->succ && !bb2->succ->succ_next
&& (bb2->succ->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0
&& (GET_CODE (BB_END (bb2)) != JUMP_INSN || simplejump_p (BB_END (bb2))));
&& (!JUMP_P (BB_END (bb2)) || simplejump_p (BB_END (bb2))));
/* Match conditional jumps - this may get tricky when fallthru and branch
edges are crossed. */
@ -1642,10 +1642,10 @@ try_crossjump_to_edge (int mode, edge e1, edge e2)
/* Edit SRC1 to go to REDIRECT_TO at NEWPOS1. */
/* Skip possible basic block header. */
if (GET_CODE (newpos1) == CODE_LABEL)
if (LABEL_P (newpos1))
newpos1 = NEXT_INSN (newpos1);
if (GET_CODE (newpos1) == NOTE)
if (NOTE_P (newpos1))
newpos1 = NEXT_INSN (newpos1);
redirect_from = split_block (src1, PREV_INSN (newpos1))->src;
@ -1835,7 +1835,7 @@ try_optimize_cfg (int mode)
if (b->pred->pred_next == NULL
&& (b->pred->flags & EDGE_FALLTHRU)
&& !(b->pred->flags & EDGE_COMPLEX)
&& GET_CODE (BB_HEAD (b)) == CODE_LABEL
&& LABEL_P (BB_HEAD (b))
/* If the previous block ends with a branch to this
block, we can't delete the label. Normally this
is a condjump that is yet to be simplified, but
@ -1843,7 +1843,7 @@ try_optimize_cfg (int mode)
some element going to the same place as the
default (fallthru). */
&& (b->pred->src == ENTRY_BLOCK_PTR
|| GET_CODE (BB_END (b->pred->src)) != JUMP_INSN
|| !JUMP_P (BB_END (b->pred->src))
|| ! label_is_jump_target_p (BB_HEAD (b),
BB_END (b->pred->src))))
{
@ -1868,7 +1868,7 @@ try_optimize_cfg (int mode)
if (!(mode & CLEANUP_CFGLAYOUT)
&& b->pred->pred_next == NULL
&& (b->pred->flags & EDGE_FALLTHRU)
&& GET_CODE (BB_HEAD (b)) != CODE_LABEL
&& !LABEL_P (BB_HEAD (b))
&& FORWARDER_BLOCK_P (b)
/* Note that forwarder_block_p true ensures that
there is a successor for this block. */
@ -1910,7 +1910,7 @@ try_optimize_cfg (int mode)
else if (!(mode & CLEANUP_CFGLAYOUT)
/* If the jump insn has side effects,
we can't kill the edge. */
&& (GET_CODE (BB_END (b)) != JUMP_INSN
&& (!JUMP_P (BB_END (b))
|| (reload_completed
? simplejump_p (BB_END (b))
: (onlyjump_p (BB_END (b))

cfgexpand.c

@ -64,7 +64,7 @@ expand_block (basic_block bb, FILE * dump_file)
/* Java emits line number notes in the top of labels.
??? Make this go away once line number notes are obsoleted. */
BB_HEAD (bb) = NEXT_INSN (last);
if (GET_CODE (BB_HEAD (bb)) == NOTE)
if (NOTE_P (BB_HEAD (bb)))
BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
bsi_next (&bsi);
note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
@ -150,7 +150,7 @@ expand_block (basic_block bb, FILE * dump_file)
expand_expr (else_exp, const0_rtx, VOIDmode, 0);
BB_END (bb) = last;
if (GET_CODE (BB_END (bb)) == BARRIER)
if (BARRIER_P (BB_END (bb)))
BB_END (bb) = PREV_INSN (BB_END (bb));
update_bb_for_insn (bb);
@ -163,7 +163,7 @@ expand_block (basic_block bb, FILE * dump_file)
new_edge = make_edge (new_bb, dest, 0);
new_edge->probability = REG_BR_PROB_BASE;
new_edge->count = new_bb->count;
if (GET_CODE (BB_END (new_bb)) == BARRIER)
if (BARRIER_P (BB_END (new_bb)))
BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
update_bb_for_insn (new_bb);
@ -182,7 +182,7 @@ expand_block (basic_block bb, FILE * dump_file)
expand_expr_stmt (stmt);
for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
{
if (GET_CODE (last) == CALL_INSN && SIBLING_CALL_P (last))
if (CALL_P (last) && SIBLING_CALL_P (last))
{
edge e;
int probability = 0;
@ -217,13 +217,13 @@ expand_block (basic_block bb, FILE * dump_file)
after the sibcall (to perform the function return). These confuse the
find_sub_basic_blocks code, so we need to get rid of these. */
last = NEXT_INSN (last);
if (GET_CODE (last) != BARRIER)
if (!BARRIER_P (last))
abort ();
while (NEXT_INSN (last))
{
/* For instance an sqrt builtin expander expands if with
sibcall in the then and label for `else`. */
if (GET_CODE (NEXT_INSN (last)) == CODE_LABEL)
if (LABEL_P (NEXT_INSN (last)))
break;
delete_insn (NEXT_INSN (last));
}
@ -252,7 +252,7 @@ expand_block (basic_block bb, FILE * dump_file)
/* Find the the block tail. The last insn is the block is the insn
before a barrier and/or table jump insn. */
last = get_last_insn ();
if (GET_CODE (last) == BARRIER)
if (BARRIER_P (last))
last = PREV_INSN (last);
if (JUMP_TABLE_DATA_P (last))
last = PREV_INSN (PREV_INSN (last));
@ -330,7 +330,7 @@ construct_exit_block (void)
end = get_last_insn ();
if (head == end)
return;
while (NEXT_INSN (head) && GET_CODE (NEXT_INSN (head)) == NOTE)
while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
head = NEXT_INSN (head);
exit_block = create_basic_block (NEXT_INSN (head), end, EXIT_BLOCK_PTR->prev_bb);
exit_block->frequency = EXIT_BLOCK_PTR->frequency;

cfglayout.c

@ -119,7 +119,7 @@ skip_insns_after_block (basic_block bb)
case CODE_LABEL:
if (NEXT_INSN (insn)
&& GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
&& JUMP_P (NEXT_INSN (insn))
&& (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
|| GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
{
@ -149,7 +149,7 @@ skip_insns_after_block (basic_block bb)
for (insn = last_insn; insn != BB_END (bb); insn = prev)
{
prev = PREV_INSN (insn);
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
switch (NOTE_LINE_NUMBER (insn))
{
case NOTE_INSN_LOOP_END:
@ -172,7 +172,7 @@ label_for_bb (basic_block bb)
{
rtx label = BB_HEAD (bb);
if (GET_CODE (label) != CODE_LABEL)
if (!LABEL_P (label))
{
if (dump_file)
fprintf (dump_file, "Emitting label for block %d\n", bb->index);
@ -195,7 +195,7 @@ record_effective_endpoints (void)
for (insn = get_insns ();
insn
&& GET_CODE (insn) == NOTE
&& NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
insn = NEXT_INSN (insn))
continue;
@ -303,7 +303,7 @@ insn_locators_initialize (void)
epilogue_locator = loc;
if (active_insn_p (insn))
INSN_LOCATOR (insn) = loc;
else if (GET_CODE (insn) == NOTE)
else if (NOTE_P (insn))
{
switch (NOTE_LINE_NUMBER (insn))
{
@ -649,7 +649,7 @@ fixup_reorder_chain (void)
e_taken = e;
bb_end_insn = BB_END (bb);
if (GET_CODE (bb_end_insn) == JUMP_INSN)
if (JUMP_P (bb_end_insn))
{
if (any_condjump_p (bb_end_insn))
{
@ -806,7 +806,7 @@ fixup_reorder_chain (void)
note);
NOTE_BASIC_BLOCK (new_note) = bb;
}
if (GET_CODE (BB_END (bb)) == JUMP_INSN
if (JUMP_P (BB_END (bb))
&& !any_condjump_p (BB_END (bb))
&& bb->succ->crossing_edge )
REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
@ -829,7 +829,7 @@ fixup_reorder_chain (void)
fprintf (dump_file, "duplicate of %i ",
bb->rbi->original->index);
else if (forwarder_block_p (bb)
&& GET_CODE (BB_HEAD (bb)) != CODE_LABEL)
&& !LABEL_P (BB_HEAD (bb)))
fprintf (dump_file, "compensation ");
else
fprintf (dump_file, "bb %i ", bb->index);
@ -875,7 +875,7 @@ update_unlikely_executed_notes (basic_block bb)
for (cur_insn = BB_HEAD (bb); cur_insn != BB_END (bb);
cur_insn = NEXT_INSN (cur_insn))
if (GET_CODE (cur_insn) == NOTE
if (NOTE_P (cur_insn)
&& NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
NOTE_BASIC_BLOCK (cur_insn) = bb;
}

cfgloopmanip.c

@ -1256,7 +1256,7 @@ create_loop_notes (void)
#ifdef ENABLE_CHECKING
/* Verify that there really are no loop notes. */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == NOTE
if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
abort ();
#endif
@ -1297,11 +1297,11 @@ create_loop_notes (void)
front of the jump. */
insn = PREV_INSN (BB_HEAD (first[loop->num]));
if (insn
&& GET_CODE (insn) == BARRIER)
&& BARRIER_P (insn))
insn = PREV_INSN (insn);
if (insn
&& GET_CODE (insn) == JUMP_INSN
&& JUMP_P (insn)
&& any_uncondjump_p (insn)
&& onlyjump_p (insn))
{
@ -1322,7 +1322,7 @@ create_loop_notes (void)
/* Position the note correctly wrto barrier. */
insn = BB_END (last[loop->num]);
if (NEXT_INSN (insn)
&& GET_CODE (NEXT_INSN (insn)) == BARRIER)
&& BARRIER_P (NEXT_INSN (insn)))
insn = NEXT_INSN (insn);
end = BB_END (last[loop->num]);

cfgrtl.c

@ -118,7 +118,7 @@ delete_insn (rtx insn)
rtx note;
bool really_delete = true;
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
{
/* Some labels can't be directly removed from the INSN chain, as they
might be references via variables, constant pool etc.
@ -147,23 +147,23 @@ delete_insn (rtx insn)
/* If deleting a jump, decrement the use count of the label. Deleting
the label itself should happen in the normal course of block merging. */
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& JUMP_LABEL (insn)
&& GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
&& LABEL_P (JUMP_LABEL (insn)))
LABEL_NUSES (JUMP_LABEL (insn))--;
/* Also if deleting an insn that references a label. */
else
{
while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
&& GET_CODE (XEXP (note, 0)) == CODE_LABEL)
&& LABEL_P (XEXP (note, 0)))
{
LABEL_NUSES (XEXP (note, 0))--;
remove_note (insn, note);
}
}
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
{
@ -179,7 +179,7 @@ delete_insn (rtx insn)
/* When deleting code in bulk (e.g. removing many unreachable
blocks) we can delete a label that's a target of the vector
before deleting the vector itself. */
if (GET_CODE (label) != NOTE)
if (!NOTE_P (label))
LABEL_NUSES (label)--;
}
}
@ -218,7 +218,7 @@ delete_insn_chain (rtx start, rtx finish)
while (1)
{
next = NEXT_INSN (start);
if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
if (NOTE_P (start) && !can_delete_note_p (start))
;
else
next = delete_insn (start);
@ -265,7 +265,7 @@ create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
rtx after;
if (GET_CODE (head) == CODE_LABEL)
if (LABEL_P (head))
after = head;
else
{
@ -285,7 +285,7 @@ create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
if (!head && !end)
head = end = bb_note
= emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
else if (GET_CODE (head) == CODE_LABEL && end)
else if (LABEL_P (head) && end)
{
bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
if (head == end)
@ -381,7 +381,7 @@ rtl_delete_block (basic_block b)
for (insn = PREV_INSN (BB_HEAD (b)); insn; insn = PREV_INSN (insn))
{
if (GET_CODE (insn) != NOTE)
if (!NOTE_P (insn))
break;
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
@ -390,7 +390,7 @@ rtl_delete_block (basic_block b)
insn = BB_HEAD (b);
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
maybe_remove_eh_handler (insn);
/* Include any jump table following the basic block. */
@ -400,7 +400,7 @@ rtl_delete_block (basic_block b)
/* Include any barrier that may follow the basic block. */
tmp = next_nonnote_insn (end);
if (tmp && GET_CODE (tmp) == BARRIER)
if (tmp && BARRIER_P (tmp))
end = tmp;
/* Selectively delete the entire chain. */
@ -436,7 +436,7 @@ free_bb_for_insn (void)
{
rtx insn;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) != BARRIER)
if (!BARRIER_P (insn))
BLOCK_FOR_INSN (insn) = NULL;
}
@ -456,7 +456,7 @@ update_bb_for_insn (basic_block bb)
for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) != BARRIER)
if (!BARRIER_P (insn))
set_block_for_insn (insn, bb);
if (insn == BB_END (bb))
break;
@ -539,7 +539,7 @@ rtl_merge_blocks (basic_block a, basic_block b)
int b_empty = 0;
/* If there was a CODE_LABEL beginning B, delete it. */
if (GET_CODE (b_head) == CODE_LABEL)
if (LABEL_P (b_head))
{
/* Detect basic blocks with nothing but a label. This can happen
in particular at the end of a function. */
@ -564,12 +564,12 @@ rtl_merge_blocks (basic_block a, basic_block b)
}
/* If there was a jump out of A, delete it. */
if (GET_CODE (a_end) == JUMP_INSN)
if (JUMP_P (a_end))
{
rtx prev;
for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
if (GET_CODE (prev) != NOTE
if (!NOTE_P (prev)
|| NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
|| prev == BB_HEAD (a))
break;
@ -592,7 +592,7 @@ rtl_merge_blocks (basic_block a, basic_block b)
a_end = PREV_INSN (del_first);
}
else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
else if (BARRIER_P (NEXT_INSN (a_end)))
del_first = NEXT_INSN (a_end);
/* Delete everything marked above as well as crap that might be
@ -642,7 +642,7 @@ rtl_can_merge_blocks (basic_block a,basic_block b)
&& a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
/* If the jump insn has side effects,
we can't kill the edge. */
&& (GET_CODE (BB_END (a)) != JUMP_INSN
&& (!JUMP_P (BB_END (a))
|| (reload_completed
? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
@ -656,7 +656,7 @@ block_label (basic_block block)
if (block == EXIT_BLOCK_PTR)
return NULL_RTX;
if (GET_CODE (BB_HEAD (block)) != CODE_LABEL)
if (!LABEL_P (BB_HEAD (block)))
{
BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
}
@ -727,7 +727,7 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
/* Remove barriers but keep jumptables. */
while (insn)
{
if (GET_CODE (insn) == BARRIER)
if (BARRIER_P (insn))
{
if (PREV_INSN (insn))
NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
@ -736,7 +736,7 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
if (NEXT_INSN (insn))
PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
}
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
break;
insn = NEXT_INSN (insn);
}
@ -788,7 +788,7 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
delete_insn_chain (label, table);
barrier = next_nonnote_insn (BB_END (src));
if (!barrier || GET_CODE (barrier) != BARRIER)
if (!barrier || !BARRIER_P (barrier))
emit_barrier_after (BB_END (src));
else
{
@ -830,7 +830,7 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
/* We don't want a block to end on a line-number note since that has
the potential of changing the code between -g and not -g. */
while (GET_CODE (BB_END (e->src)) == NOTE
while (NOTE_P (BB_END (e->src))
&& NOTE_LINE_NUMBER (BB_END (e->src)) >= 0)
delete_insn (BB_END (e->src));
@ -853,7 +853,7 @@ last_loop_beg_note (rtx insn)
{
rtx last = insn;
for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
for (insn = NEXT_INSN (insn); insn && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
insn = NEXT_INSN (insn))
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
@ -875,7 +875,7 @@ redirect_branch_edge (edge e, basic_block target)
/* We can only redirect non-fallthru edges of jump insn. */
if (e->flags & EDGE_FALLTHRU)
return NULL;
else if (GET_CODE (insn) != JUMP_INSN)
else if (!JUMP_P (insn))
return NULL;
/* Recognize a tablejump and adjust all matching cases. */
@ -1103,7 +1103,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
for (bb_note = BB_HEAD (jump_block);
bb_note && bb_note != NEXT_INSN (BB_END (jump_block));
bb_note = NEXT_INSN (bb_note))
if (GET_CODE (bb_note) == NOTE
if (NOTE_P (bb_note)
&& NOTE_LINE_NUMBER (bb_note) == NOTE_INSN_BASIC_BLOCK)
break;
new_note = emit_note_after (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
@ -1111,7 +1111,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target)
NOTE_BASIC_BLOCK (new_note) = jump_block;
jump_block->partition = COLD_PARTITION;
}
if (GET_CODE (BB_END (jump_block)) == JUMP_INSN
if (JUMP_P (BB_END (jump_block))
&& !any_condjump_p (BB_END (jump_block))
&& jump_block->succ->crossing_edge )
REG_NOTES (BB_END (jump_block)) = gen_rtx_EXPR_LIST
@ -1212,7 +1212,7 @@ rtl_tidy_fallthru_edge (edge e)
If block B consisted only of this single jump, turn it into a deleted
note. */
q = BB_END (b);
if (GET_CODE (q) == JUMP_INSN
if (JUMP_P (q)
&& onlyjump_p (q)
&& (any_uncondjump_p (q)
|| (b->succ == e && e->succ_next == NULL)))
@ -1228,7 +1228,7 @@ rtl_tidy_fallthru_edge (edge e)
/* We don't want a block to end on a line-number note since that has
the potential of changing the code between -g and not -g. */
while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
while (NOTE_P (q) && NOTE_LINE_NUMBER (q) >= 0)
q = PREV_INSN (q);
}
@ -1262,7 +1262,7 @@ back_edge_of_syntactic_loop_p (basic_block bb1, basic_block bb2)
for (insn = BB_END (bb1); insn != BB_HEAD (bb2) && count >= 0;
insn = NEXT_INSN (insn))
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
{
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
count++;
@ -1333,7 +1333,7 @@ rtl_split_edge (edge edge_in)
if (edge_in->dest != EXIT_BLOCK_PTR
&& PREV_INSN (BB_HEAD (edge_in->dest))
&& GET_CODE (PREV_INSN (BB_HEAD (edge_in->dest))) == NOTE
&& NOTE_P (PREV_INSN (BB_HEAD (edge_in->dest)))
&& (NOTE_LINE_NUMBER (PREV_INSN (BB_HEAD (edge_in->dest)))
== NOTE_INSN_LOOP_BEG)
&& !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
@ -1349,7 +1349,7 @@ rtl_split_edge (edge edge_in)
{
before = NEXT_INSN (BB_END (edge_in->src));
if (before
&& GET_CODE (before) == NOTE
&& NOTE_P (before)
&& NOTE_LINE_NUMBER (before) == NOTE_INSN_LOOP_END)
before = NEXT_INSN (before);
bb = create_basic_block (before, NULL, edge_in->src);
@ -1517,7 +1517,7 @@ commit_one_edge_insertion (edge e, int watch_calls)
its return value. */
if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
&& e->src != ENTRY_BLOCK_PTR
&& GET_CODE (BB_END (e->src)) == CALL_INSN)
&& CALL_P (BB_END (e->src)))
{
rtx next = next_nonnote_insn (BB_END (e->src));
@ -1542,12 +1542,12 @@ commit_one_edge_insertion (edge e, int watch_calls)
/* Get the location correct wrt a code label, and "nice" wrt
a basic block note, and before everything else. */
tmp = BB_HEAD (bb);
if (GET_CODE (tmp) == CODE_LABEL)
if (LABEL_P (tmp))
tmp = NEXT_INSN (tmp);
if (NOTE_INSN_BASIC_BLOCK_P (tmp))
tmp = NEXT_INSN (tmp);
if (tmp
&& GET_CODE (tmp) == NOTE
&& NOTE_P (tmp)
&& NOTE_LINE_NUMBER (tmp) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
tmp = NEXT_INSN (tmp);
if (tmp == BB_HEAD (bb))
@ -1572,9 +1572,9 @@ commit_one_edge_insertion (edge e, int watch_calls)
We know this block has a single successor, so we can just emit
the queued insns before the jump. */
if (GET_CODE (BB_END (bb)) == JUMP_INSN)
if (JUMP_P (BB_END (bb)))
for (before = BB_END (bb);
GET_CODE (PREV_INSN (before)) == NOTE
NOTE_P (PREV_INSN (before))
&& NOTE_LINE_NUMBER (PREV_INSN (before)) ==
NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
;
@ -1606,7 +1606,7 @@ commit_one_edge_insertion (edge e, int watch_calls)
bb_note = NULL_RTX;
for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
cur_insn = NEXT_INSN (cur_insn))
if (GET_CODE (cur_insn) == NOTE
if (NOTE_P (cur_insn)
&& NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_BASIC_BLOCK)
{
bb_note = cur_insn;
@ -1616,7 +1616,7 @@ commit_one_edge_insertion (edge e, int watch_calls)
new_note = emit_note_after (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
bb_note);
NOTE_BASIC_BLOCK (new_note) = bb;
if (GET_CODE (BB_END (bb)) == JUMP_INSN
if (JUMP_P (BB_END (bb))
&& !any_condjump_p (BB_END (bb))
&& bb->succ->crossing_edge )
REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
@ -1655,7 +1655,7 @@ commit_one_edge_insertion (edge e, int watch_calls)
if (before)
delete_insn (before);
}
else if (GET_CODE (last) == JUMP_INSN)
else if (JUMP_P (last))
abort ();
/* Mark the basic block for find_sub_basic_blocks. */
@ -1836,8 +1836,8 @@ print_rtl_with_bb (FILE *outf, rtx rtx_first)
}
if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
&& GET_CODE (tmp_rtx) != NOTE
&& GET_CODE (tmp_rtx) != BARRIER)
&& !NOTE_P (tmp_rtx)
&& !BARRIER_P (tmp_rtx))
fprintf (outf, ";; Insn is not within a basic block\n");
else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
fprintf (outf, ";; Insn is in multiple basic blocks\n");
@ -1874,7 +1874,7 @@ void
update_br_prob_note (basic_block bb)
{
rtx note;
if (GET_CODE (BB_END (bb)) != JUMP_INSN)
if (!JUMP_P (BB_END (bb)))
return;
note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
@ -2013,7 +2013,7 @@ rtl_verify_flow_info_1 (void)
err = 1;
}
if (n_branch
&& (GET_CODE (BB_END (bb)) != JUMP_INSN
&& (!JUMP_P (BB_END (bb))
|| (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
|| any_condjump_p (BB_END (bb))))))
{
@ -2036,14 +2036,14 @@ rtl_verify_flow_info_1 (void)
error ("Wrong amount of branch edges after conditional jump %i", bb->index);
err = 1;
}
if (n_call && GET_CODE (BB_END (bb)) != CALL_INSN)
if (n_call && !CALL_P (BB_END (bb)))
{
error ("Call edges for non-call insn in bb %i", bb->index);
err = 1;
}
if (n_abnormal
&& (GET_CODE (BB_END (bb)) != CALL_INSN && n_call != n_abnormal)
&& (GET_CODE (BB_END (bb)) != JUMP_INSN
&& (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
&& (!JUMP_P (BB_END (bb))
|| any_condjump_p (BB_END (bb))
|| any_uncondjump_p (BB_END (bb))))
{
@ -2071,7 +2071,7 @@ rtl_verify_flow_info_1 (void)
block. It ought to contain optional CODE_LABEL followed
by NOTE_BASIC_BLOCK. */
x = BB_HEAD (bb);
if (GET_CODE (x) == CODE_LABEL)
if (LABEL_P (x))
{
if (BB_END (bb) == x)
{
@ -2149,10 +2149,10 @@ rtl_verify_flow_info (void)
rtx insn;
/* Ensure existence of barrier in BB with no fallthru edges. */
for (insn = BB_END (bb); !insn || GET_CODE (insn) != BARRIER;
for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
insn = NEXT_INSN (insn))
if (!insn
|| (GET_CODE (insn) == NOTE
|| (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
{
error ("missing barrier after block %i", bb->index);
@ -2175,7 +2175,7 @@ rtl_verify_flow_info (void)
else
for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
insn = NEXT_INSN (insn))
if (GET_CODE (insn) == BARRIER
if (BARRIER_P (insn)
#ifndef CASE_DROPS_THROUGH
|| INSN_P (insn)
#else
@ -2218,7 +2218,7 @@ rtl_verify_flow_info (void)
case CODE_LABEL:
/* An addr_vec is placed outside any basic block. */
if (NEXT_INSN (x)
&& GET_CODE (NEXT_INSN (x)) == JUMP_INSN
&& JUMP_P (NEXT_INSN (x))
&& (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
x = NEXT_INSN (x);
@ -2232,9 +2232,9 @@ rtl_verify_flow_info (void)
}
if (INSN_P (x)
&& GET_CODE (x) == JUMP_INSN
&& JUMP_P (x)
&& returnjump_p (x) && ! condjump_p (x)
&& ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
&& ! (NEXT_INSN (x) && BARRIER_P (NEXT_INSN (x))))
fatal_insn ("return not followed by barrier", x);
if (curr_bb && x == BB_END (curr_bb))
curr_bb = NULL;
@ -2260,7 +2260,7 @@ purge_dead_edges (basic_block bb)
bool purged = false;
/* If this instruction cannot trap, remove REG_EH_REGION notes. */
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& (note = find_reg_note (insn, REG_EH_REGION, NULL)))
{
rtx eqnote;
@ -2282,7 +2282,7 @@ purge_dead_edges (basic_block bb)
}
else if (e->flags & EDGE_ABNORMAL_CALL)
{
if (GET_CODE (BB_END (bb)) == CALL_INSN
if (CALL_P (BB_END (bb))
&& (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
|| INTVAL (XEXP (note, 0)) >= 0))
continue;
@ -2295,7 +2295,7 @@ purge_dead_edges (basic_block bb)
purged = true;
}
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
{
rtx note;
edge b,f;
@ -2386,7 +2386,7 @@ purge_dead_edges (basic_block bb)
return purged;
}
else if (GET_CODE (insn) == CALL_INSN && SIBLING_CALL_P (insn))
else if (CALL_P (insn) && SIBLING_CALL_P (insn))
{
/* First, there should not be any EH or ABCALL edges resulting
from non-local gotos and the like. If there were, we shouldn't
@ -2524,7 +2524,7 @@ cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
if (e->flags & EDGE_FALLTHRU)
{
/* Redirect any branch edges unified with the fallthru one. */
if (GET_CODE (BB_END (src)) == JUMP_INSN
if (JUMP_P (BB_END (src))
&& label_is_jump_target_p (BB_HEAD (e->dest),
BB_END (src)))
{
@ -2602,7 +2602,7 @@ cfg_layout_delete_block (basic_block bb)
insn = bb->rbi->footer;
while (insn)
{
if (GET_CODE (insn) == BARRIER)
if (BARRIER_P (insn))
{
if (PREV_INSN (insn))
NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
@ -2611,7 +2611,7 @@ cfg_layout_delete_block (basic_block bb)
if (NEXT_INSN (insn))
PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
}
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
break;
insn = NEXT_INSN (insn);
}
@@ -2682,7 +2682,7 @@ cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
&& a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
/* If the jump insn has side effects,
we can't kill the edge. */
&& (GET_CODE (BB_END (a)) != JUMP_INSN
&& (!JUMP_P (BB_END (a))
|| (reload_completed
? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}
@@ -2697,14 +2697,14 @@ cfg_layout_merge_blocks (basic_block a, basic_block b)
#endif
/* If there was a CODE_LABEL beginning B, delete it. */
if (GET_CODE (BB_HEAD (b)) == CODE_LABEL)
if (LABEL_P (BB_HEAD (b)))
delete_insn (BB_HEAD (b));
/* We should have fallthru edge in a, or we can do dummy redirection to get
it cleaned up. */
if (GET_CODE (BB_END (a)) == JUMP_INSN)
if (JUMP_P (BB_END (a)))
try_redirect_by_replacing_jump (a->succ, b, true);
if (GET_CODE (BB_END (a)) == JUMP_INSN)
if (JUMP_P (BB_END (a)))
abort ();
/* Possible line number notes should appear in between. */
@@ -2805,11 +2805,11 @@ rtl_block_ends_with_call_p (basic_block bb)
{
rtx insn = BB_END (bb);
while (GET_CODE (insn) != CALL_INSN
while (!CALL_P (insn)
&& insn != BB_HEAD (bb)
&& keep_with_call_p (insn))
insn = PREV_INSN (insn);
return (GET_CODE (insn) == CALL_INSN);
return (CALL_P (insn));
}
/* Return 1 if BB ends with a conditional branch, 0 otherwise. */
@@ -2829,7 +2829,7 @@ need_fake_edge_p (rtx insn)
if (!INSN_P (insn))
return false;
if ((GET_CODE (insn) == CALL_INSN
if ((CALL_P (insn)
&& !SIBLING_CALL_P (insn)
&& !find_reg_note (insn, REG_NORETURN, NULL)
&& !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
@@ -2930,7 +2930,7 @@ rtl_flow_call_edges_add (sbitmap blocks)
/* Don't split the block between a call and an insn that should
remain in the same block as the call. */
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
while (split_at_insn != BB_END (bb)
&& keep_with_call_p (NEXT_INSN (split_at_insn)))
split_at_insn = NEXT_INSN (split_at_insn);
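For reference, every hunk in this patch is a mechanical substitution: each predicate is a one-line wrapper over GET_CODE, so the generated code is unchanged. A minimal sketch of the definitions, roughly as they appear in gcc/rtl.h at this point (rtl.h itself is authoritative):

    /* Each predicate tests one rtx code.  */
    #define NOTE_P(X)          (GET_CODE (X) == NOTE)
    #define BARRIER_P(X)       (GET_CODE (X) == BARRIER)
    #define LABEL_P(X)         (GET_CODE (X) == CODE_LABEL)
    #define JUMP_P(X)          (GET_CODE (X) == JUMP_INSN)
    #define CALL_P(X)          (GET_CODE (X) == CALL_INSN)
    #define NONJUMP_INSN_P(X)  (GET_CODE (X) == INSN)
    /* Any real instruction: INSN, JUMP_INSN or CALL_INSN.  */
    #define INSN_P(X)          (NONJUMP_INSN_P (X) || JUMP_P (X) || CALL_P (X))

Thus "GET_CODE (insn) == CALL_INSN" rewrites to "CALL_P (insn)", and a negated test such as "GET_CODE (insn) != NOTE" rewrites to "!NOTE_P (insn)".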

--- a/gcc/combine.c
+++ b/gcc/combine.c

@@ -738,7 +738,7 @@ combine_instructions (rtx f, unsigned int nregs)
INSN_UID (insn), uid_insn_cost[INSN_UID (insn)]);
}
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
label_tick++;
}
@@ -760,7 +760,7 @@ combine_instructions (rtx f, unsigned int nregs)
{
next = 0;
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
label_tick++;
else if (INSN_P (insn))
@@ -784,7 +784,7 @@ combine_instructions (rtx f, unsigned int nregs)
/* If the linked insn has been replaced by a note, then there
is no point in pursuing this chain any further. */
if (GET_CODE (link) == NOTE)
if (NOTE_P (link))
continue;
for (nextlinks = LOG_LINKS (link);
@@ -804,9 +804,9 @@ combine_instructions (rtx f, unsigned int nregs)
We need this special code because data flow connections
via CC0 do not get entered in LOG_LINKS. */
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& (prev = prev_nonnote_insn (insn)) != 0
&& GET_CODE (prev) == INSN
&& NONJUMP_INSN_P (prev)
&& sets_cc0_p (PATTERN (prev)))
{
if ((next = try_combine (insn, prev,
@@ -822,9 +822,9 @@ combine_instructions (rtx f, unsigned int nregs)
}
/* Do the same for an insn that explicitly references CC0. */
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& (prev = prev_nonnote_insn (insn)) != 0
&& GET_CODE (prev) == INSN
&& NONJUMP_INSN_P (prev)
&& sets_cc0_p (PATTERN (prev))
&& GET_CODE (PATTERN (insn)) == SET
&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
@@ -845,11 +845,11 @@ combine_instructions (rtx f, unsigned int nregs)
explicitly references CC0. If so, try this insn, that insn,
and its predecessor if it sets CC0. */
for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
if (GET_CODE (XEXP (links, 0)) == INSN
if (NONJUMP_INSN_P (XEXP (links, 0))
&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
&& GET_CODE (prev) == INSN
&& NONJUMP_INSN_P (prev)
&& sets_cc0_p (PATTERN (prev))
&& (next = try_combine (insn, XEXP (links, 0),
prev, &new_direct_jump_p)) != 0)
@@ -891,7 +891,7 @@ combine_instructions (rtx f, unsigned int nregs)
}
}
if (GET_CODE (insn) != NOTE)
if (!NOTE_P (insn))
record_dead_and_set_regs (insn);
retry:
@@ -1205,7 +1205,7 @@ can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
/* Can't merge a function call. */
|| GET_CODE (src) == CALL
/* Don't eliminate a function call argument. */
|| (GET_CODE (i3) == CALL_INSN
|| (CALL_P (i3)
&& (find_reg_fusage (i3, USE, dest)
|| (REG_P (dest)
&& REGNO (dest) < FIRST_PSEUDO_REGISTER
@@ -1342,7 +1342,7 @@ can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
#ifdef AUTO_INC_DEC
for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_INC
&& (GET_CODE (i3) == JUMP_INSN
&& (JUMP_P (i3)
|| reg_used_between_p (XEXP (link, 0), insn, i3)
|| reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
return 0;
@@ -1359,7 +1359,7 @@ can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
but that would be much slower, and this ought to be equivalent. */
p = prev_nonnote_insn (insn);
if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
&& ! all_adjacent)
return 0;
#endif
@@ -1688,7 +1688,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
where I2 and I3 are adjacent to avoid making difficult register
usage tests. */
if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
&& REG_P (SET_SRC (PATTERN (i3)))
&& REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
&& find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
@@ -2646,7 +2646,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
they are adjacent to each other or not. */
{
rtx p = prev_nonnote_insn (i3);
if (p && p != i2 && GET_CODE (p) == INSN && newi2pat
if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
&& sets_cc0_p (newi2pat))
{
undo_all ();
@@ -2702,7 +2702,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
INSN_CODE (i3) = insn_code_number;
PATTERN (i3) = newpat;
if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3))
if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
{
rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
@@ -2947,7 +2947,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
mark_jump_label (PATTERN (i3), i3, 0);
if ((temp = next_nonnote_insn (i3)) == NULL_RTX
|| GET_CODE (temp) != BARRIER)
|| !BARRIER_P (temp))
emit_barrier_after (i3);
}
@@ -2958,7 +2958,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
*new_direct_jump_p = 1;
if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX
|| GET_CODE (temp) != BARRIER)
|| !BARRIER_P (temp))
emit_barrier_after (undobuf.other_insn);
}
@@ -3087,12 +3087,12 @@ find_split_point (rtx *loc, rtx insn)
if (seq
&& NEXT_INSN (seq) != NULL_RTX
&& NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
&& GET_CODE (seq) == INSN
&& NONJUMP_INSN_P (seq)
&& GET_CODE (PATTERN (seq)) == SET
&& SET_DEST (PATTERN (seq)) == reg
&& ! reg_mentioned_p (reg,
SET_SRC (PATTERN (seq)))
&& GET_CODE (NEXT_INSN (seq)) == INSN
&& NONJUMP_INSN_P (NEXT_INSN (seq))
&& GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
&& SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
&& memory_address_p (GET_MODE (x),
@@ -10939,7 +10939,7 @@ record_dead_and_set_regs (rtx insn)
record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
}
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
@@ -11321,7 +11321,7 @@ reg_dead_at_p (rtx reg, rtx insn)
/* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
beginning of function. */
for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
for (; insn && !LABEL_P (insn) && !BARRIER_P (insn);
insn = prev_nonnote_insn (insn))
{
note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
@@ -11740,9 +11740,9 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
break;
case REG_NON_LOCAL_GOTO:
if (GET_CODE (i3) == JUMP_INSN)
if (JUMP_P (i3))
place = i3;
else if (i2 && GET_CODE (i2) == JUMP_INSN)
else if (i2 && JUMP_P (i2))
place = i2;
else
abort ();
@@ -11750,9 +11750,9 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
case REG_EH_REGION:
/* These notes must remain with the call or trapping instruction. */
if (GET_CODE (i3) == CALL_INSN)
if (CALL_P (i3))
place = i3;
else if (i2 && GET_CODE (i2) == CALL_INSN)
else if (i2 && CALL_P (i2))
place = i2;
else if (flag_non_call_exceptions)
{
@@ -11773,9 +11773,9 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
case REG_SETJMP:
/* These notes must remain with the call. It should not be
possible for both I2 and I3 to be a call. */
if (GET_CODE (i3) == CALL_INSN)
if (CALL_P (i3))
place = i3;
else if (i2 && GET_CODE (i2) == CALL_INSN)
else if (i2 && CALL_P (i2))
place = i2;
else
abort ();
@@ -11883,19 +11883,19 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
/* Don't attach REG_LABEL note to a JUMP_INSN which has
JUMP_LABEL already. Instead, decrement LABEL_NUSES. */
if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place))
if (place && JUMP_P (place) && JUMP_LABEL (place))
{
if (JUMP_LABEL (place) != XEXP (note, 0))
abort ();
if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL)
if (LABEL_P (JUMP_LABEL (place)))
LABEL_NUSES (JUMP_LABEL (place))--;
place = 0;
}
if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2))
if (place2 && JUMP_P (place2) && JUMP_LABEL (place2))
{
if (JUMP_LABEL (place2) != XEXP (note, 0))
abort ();
if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL)
if (LABEL_P (JUMP_LABEL (place2)))
LABEL_NUSES (JUMP_LABEL (place2))--;
place2 = 0;
}
@@ -11912,7 +11912,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
/* If the insn previously containing this note still exists,
put it back where it was. Otherwise move it to the previous
insn. Adjust the corresponding REG_LIBCALL note. */
if (GET_CODE (from_insn) != NOTE)
if (!NOTE_P (from_insn))
place = from_insn;
else
{
@@ -11932,7 +11932,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
case REG_LIBCALL:
/* This is handled similarly to REG_RETVAL. */
if (GET_CODE (from_insn) != NOTE)
if (!NOTE_P (from_insn))
place = from_insn;
else
{
@@ -11966,7 +11966,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
use of A and put the death note there. */
if (from_insn
&& GET_CODE (from_insn) == CALL_INSN
&& CALL_P (from_insn)
&& find_reg_fusage (from_insn, USE, XEXP (note, 0)))
place = from_insn;
else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
@@ -12074,7 +12074,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2)
}
}
else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
|| (GET_CODE (tem) == CALL_INSN
|| (CALL_P (tem)
&& find_reg_fusage (tem, USE, XEXP (note, 0))))
{
place = tem;
@@ -12281,7 +12281,7 @@ distribute_links (rtx links)
replace I3, I2, and I1 by I3 and I2. But in that case the
destination of I2 also remains unchanged. */
if (GET_CODE (XEXP (link, 0)) == NOTE
if (NOTE_P (XEXP (link, 0))
|| (set = single_set (XEXP (link, 0))) == 0)
continue;
@@ -12310,7 +12310,7 @@ distribute_links (rtx links)
place = insn;
break;
}
else if (GET_CODE (insn) == CALL_INSN
else if (CALL_P (insn)
&& find_reg_fusage (insn, USE, reg))
{
place = insn;
@@ -12378,7 +12378,7 @@ static int
insn_cuid (rtx insn)
{
while (insn != 0 && INSN_UID (insn) > max_uid_cuid
&& GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
&& NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE)
insn = NEXT_INSN (insn);
if (INSN_UID (insn) > max_uid_cuid)

--- a/gcc/cse.c
+++ b/gcc/cse.c

@@ -3486,7 +3486,7 @@ fold_rtx (rtx x, rtx insn)
rtx label = XEXP (base, 0);
rtx table_insn = NEXT_INSN (label);
if (table_insn && GET_CODE (table_insn) == JUMP_INSN
if (table_insn && JUMP_P (table_insn)
&& GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
{
rtx table = PATTERN (table_insn);
@@ -3497,7 +3497,7 @@ fold_rtx (rtx x, rtx insn)
return XVECEXP (table, 0,
offset / GET_MODE_SIZE (GET_MODE (table)));
}
if (table_insn && GET_CODE (table_insn) == JUMP_INSN
if (table_insn && JUMP_P (table_insn)
&& GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
{
rtx table = PATTERN (table_insn);
@@ -4589,7 +4589,7 @@ cse_insn (rtx insn, rtx libcall_insn)
Also determine whether there is a CLOBBER that invalidates
all memory references, or all references at varying addresses. */
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
{
@@ -5597,7 +5597,7 @@ cse_insn (rtx insn, rtx libcall_insn)
{
/* Now emit a BARRIER after the unconditional jump. */
if (NEXT_INSN (insn) == 0
|| GET_CODE (NEXT_INSN (insn)) != BARRIER)
|| !BARRIER_P (NEXT_INSN (insn)))
emit_barrier_after (insn);
/* We reemit the jump in as many cases as possible just in
@@ -5628,7 +5628,7 @@ cse_insn (rtx insn, rtx libcall_insn)
/* Now emit a BARRIER after the unconditional jump. */
if (NEXT_INSN (insn) == 0
|| GET_CODE (NEXT_INSN (insn)) != BARRIER)
|| !BARRIER_P (NEXT_INSN (insn)))
emit_barrier_after (insn);
}
else
@@ -5796,7 +5796,7 @@ cse_insn (rtx insn, rtx libcall_insn)
/* Some registers are invalidated by subroutine calls. Memory is
invalidated by non-constant calls. */
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
if (! CONST_OR_PURE_CALL_P (insn))
invalidate_memory ();
@@ -5834,7 +5834,7 @@ cse_insn (rtx insn, rtx libcall_insn)
}
/* A volatile ASM invalidates everything. */
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == ASM_OPERANDS
&& MEM_VOLATILE_P (PATTERN (insn)))
flush_hash_table ();
@@ -6101,7 +6101,7 @@ cse_insn (rtx insn, rtx libcall_insn)
{
prev = PREV_INSN (prev);
}
while (prev && GET_CODE (prev) == NOTE
while (prev && NOTE_P (prev)
&& NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
/* Do not swap the registers around if the previous instruction
@@ -6116,7 +6116,7 @@ cse_insn (rtx insn, rtx libcall_insn)
note. We cannot do that because REG_EQUIV may provide an
uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
if (prev != 0 && GET_CODE (prev) == INSN
if (prev != 0 && NONJUMP_INSN_P (prev)
&& GET_CODE (PATTERN (prev)) == SET
&& SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
&& ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
@@ -6147,7 +6147,7 @@ cse_insn (rtx insn, rtx libcall_insn)
the condition being tested. */
last_jump_equiv_class = 0;
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& n_sets == 1 && GET_CODE (x) == SET
&& GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
record_jump_equiv (insn, 0);
@@ -6156,7 +6156,7 @@ cse_insn (rtx insn, rtx libcall_insn)
/* If the previous insn set CC0 and this insn no longer references CC0,
delete the previous insn. Here we use the fact that nothing expects CC0
to be valid over an insn, which is true until the final pass. */
if (prev_insn && GET_CODE (prev_insn) == INSN
if (prev_insn && NONJUMP_INSN_P (prev_insn)
&& (tem = single_set (prev_insn)) != 0
&& SET_DEST (tem) == cc0_rtx
&& ! reg_mentioned_p (cc0_rtx, x))
@@ -6366,12 +6366,12 @@ cse_around_loop (rtx loop_start)
/* If the jump at the end of the loop doesn't go to the start, we don't
do anything. */
for (insn = PREV_INSN (loop_start);
insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
insn && (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) >= 0);
insn = PREV_INSN (insn))
;
if (insn == 0
|| GET_CODE (insn) != NOTE
|| !NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
return;
@@ -6405,9 +6405,9 @@ cse_around_loop (rtx loop_start)
accesses by not processing any instructions created after cse started. */
for (insn = NEXT_INSN (loop_start);
GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
!CALL_P (insn) && !LABEL_P (insn)
&& INSN_UID (insn) < max_insn_uid
&& ! (GET_CODE (insn) == NOTE
&& ! (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
insn = NEXT_INSN (insn))
{
@@ -6466,13 +6466,13 @@ invalidate_skipped_block (rtx start)
{
rtx insn;
for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
for (insn = start; insn && !LABEL_P (insn);
insn = NEXT_INSN (insn))
{
if (! INSN_P (insn))
continue;
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
if (! CONST_OR_PURE_CALL_P (insn))
invalidate_memory ();
@@ -6547,8 +6547,8 @@ cse_set_around_loop (rtx x, rtx insn, rtx loop_start)
a label or CALL_INSN. */
for (p = prev_nonnote_insn (loop_start);
p && GET_CODE (p) != CALL_INSN
&& GET_CODE (p) != CODE_LABEL;
p && !CALL_P (p)
&& !LABEL_P (p);
p = prev_nonnote_insn (p))
if ((set = single_set (p)) != 0
&& REG_P (SET_DEST (set))
@@ -6676,7 +6676,7 @@ cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
follow_jumps = skip_blocks = 0;
/* Scan to end of this basic block. */
while (p && GET_CODE (p) != CODE_LABEL)
while (p && !LABEL_P (p))
{
/* Don't cse out the end of a loop. This makes a difference
only for the unusual loops that always execute at least once;
@@ -6691,14 +6691,14 @@ cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
If we are running after loop.c has finished, we can ignore
the NOTE_INSN_LOOP_END. */
if (! after_loop && GET_CODE (p) == NOTE
if (! after_loop && NOTE_P (p)
&& NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
break;
/* Don't cse over a call to setjmp; on some machines (eg VAX)
the regs restored by the longjmp come from
a later time than the setjmp. */
if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
if (PREV_INSN (p) && CALL_P (PREV_INSN (p))
&& find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
break;
@@ -6706,7 +6706,7 @@ cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
especially if it is really an ASM_OPERANDS. */
if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
nsets += XVECLEN (PATTERN (p), 0);
else if (GET_CODE (p) != NOTE)
else if (!NOTE_P (p))
nsets += 1;
/* Ignore insns made by CSE; they cannot affect the boundaries of
@@ -6739,7 +6739,7 @@ cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
registers set in the block when following the jump. */
else if ((follow_jumps || skip_blocks) && path_size < PARAM_VALUE (PARAM_MAX_CSE_PATH_LENGTH) - 1
&& GET_CODE (p) == JUMP_INSN
&& JUMP_P (p)
&& GET_CODE (PATTERN (p)) == SET
&& GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
&& JUMP_LABEL (p) != 0
@@ -6747,16 +6747,16 @@ cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
&& NEXT_INSN (JUMP_LABEL (p)) != 0)
{
for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
if ((GET_CODE (q) != NOTE
if ((!NOTE_P (q)
|| NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
|| (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
|| (PREV_INSN (q) && CALL_P (PREV_INSN (q))
&& find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
&& (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
&& (!LABEL_P (q) || LABEL_NUSES (q) != 0))
break;
/* If we ran into a BARRIER, this code is an extension of the
basic block when the branch is taken. */
if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
if (follow_jumps && q != 0 && BARRIER_P (q))
{
/* Don't allow ourself to keep walking around an
always-executed loop. */
@@ -6788,7 +6788,7 @@ cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
PUT_MODE (NEXT_INSN (p), QImode);
}
/* Detect a branch around a block of code. */
else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
else if (skip_blocks && q != 0 && !LABEL_P (q))
{
rtx tmp;
@@ -6808,7 +6808,7 @@ cse_end_of_basic_block (rtx insn, struct cse_basic_block_data *data,
/* This is no_labels_between_p (p, q) with an added check for
reaching the end of a function (in case Q precedes P). */
for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
if (GET_CODE (tmp) == CODE_LABEL)
if (LABEL_P (tmp))
break;
if (tmp == q)
@@ -6907,7 +6907,7 @@ cse_main (rtx f, int nregs, int after_loop, FILE *file)
for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) != NOTE
if (!NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) < 0)
INSN_CUID (insn) = ++i;
else
@@ -7024,7 +7024,7 @@ cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
new_basic_block ();
/* TO might be a label. If so, protect it from being deleted. */
if (to != 0 && GET_CODE (to) == CODE_LABEL)
if (to != 0 && LABEL_P (to))
++LABEL_NUSES (to);
for (insn = from; insn != to; insn = NEXT_INSN (insn))
@@ -7115,7 +7115,7 @@ cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
/* If we haven't already found an insn where we added a LABEL_REF,
check this one. */
if (GET_CODE (insn) == INSN && ! recorded_label_ref
if (NONJUMP_INSN_P (insn) && ! recorded_label_ref
&& for_each_rtx (&PATTERN (insn), check_for_label_ref,
(void *) insn))
recorded_label_ref = 1;
@@ -7155,7 +7155,7 @@ cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
want to count the use in that jump. */
if (to != 0 && NEXT_INSN (insn) == to
&& GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
&& LABEL_P (to) && --LABEL_NUSES (to) == to_usage)
{
struct cse_basic_block_data val;
rtx prev;
@@ -7172,7 +7172,7 @@ cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
/* If TO was preceded by a BARRIER we are done with this block
because it has no continuation. */
prev = prev_nonnote_insn (to);
if (prev && GET_CODE (prev) == BARRIER)
if (prev && BARRIER_P (prev))
{
free (qty_table + max_reg);
return insn;
@@ -7199,7 +7199,7 @@ cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
to = val.last;
/* Prevent TO from being deleted if it is a label. */
if (to != 0 && GET_CODE (to) == CODE_LABEL)
if (to != 0 && LABEL_P (to))
++LABEL_NUSES (to);
/* Back up so we process the first insn in the extension. */
@@ -7219,8 +7219,8 @@ cse_basic_block (rtx from, rtx to, struct branch_path *next_branch,
if ((cse_jumps_altered == 0
|| (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
&& around_loop && to != 0
&& GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
&& GET_CODE (insn) == JUMP_INSN
&& NOTE_P (to) && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
&& JUMP_P (insn)
&& JUMP_LABEL (insn) != 0
&& LABEL_NUSES (JUMP_LABEL (insn)) == 1)
cse_around_loop (JUMP_LABEL (insn));
@@ -7830,7 +7830,7 @@ cse_condition_code_reg (void)
to optimize. */
last_insn = BB_END (bb);
if (GET_CODE (last_insn) != JUMP_INSN)
if (!JUMP_P (last_insn))
continue;
if (reg_referenced_p (cc_reg_1, PATTERN (last_insn)))

--- a/gcc/cselib.c
+++ b/gcc/cselib.c

@@ -1341,10 +1341,10 @@ cselib_process_insn (rtx insn)
cselib_current_insn = insn;
/* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
if (GET_CODE (insn) == CODE_LABEL
|| (GET_CODE (insn) == CALL_INSN
if (LABEL_P (insn)
|| (CALL_P (insn)
&& find_reg_note (insn, REG_SETJMP, NULL))
|| (GET_CODE (insn) == INSN
|| (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == ASM_OPERANDS
&& MEM_VOLATILE_P (PATTERN (insn))))
{
@@ -1361,7 +1361,7 @@ cselib_process_insn (rtx insn)
/* If this is a call instruction, forget anything stored in a
call clobbered register, or, if this is not a const call, in
memory. */
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (call_used_regs[i])
@@ -1384,7 +1384,7 @@ cselib_process_insn (rtx insn)
/* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
after we have processed the insn. */
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
if (GET_CODE (XEXP (x, 0)) == CLOBBER)
cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX, NULL);

--- a/gcc/ddg.c
+++ b/gcc/ddg.c

@@ -477,12 +477,12 @@ create_ddg (basic_block bb, struct df *df, int closing_branch_deps)
{
if (! INSN_P (insn))
{
if (! first_note && GET_CODE (insn) == NOTE
if (! first_note && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK)
first_note = insn;
continue;
}
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
{
if (g->closing_branch)
abort (); /* Found two branches in DDG. */

--- a/gcc/df.c
+++ b/gcc/df.c

@@ -1168,7 +1168,7 @@ df_insn_refs_record (struct df *df, basic_block bb, rtx insn)
}
}
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
rtx note;
rtx x;
@@ -1204,7 +1204,7 @@ df_insn_refs_record (struct df *df, basic_block bb, rtx insn)
df_uses_record (df, &PATTERN (insn),
DF_REF_REG_USE, bb, insn, 0);
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
rtx note;
@@ -1654,7 +1654,7 @@ df_bb_rd_local_compute (struct df *df, basic_block bb, bitmap call_killed_defs)
bitmap_set_bit (seen, regno);
}
if (GET_CODE (insn) == CALL_INSN && (df->flags & DF_HARD_REGS))
if (CALL_P (insn) && (df->flags & DF_HARD_REGS))
{
bitmap_operation (bb_info->rd_kill, bb_info->rd_kill,
call_killed_defs, BITMAP_IOR);
@@ -2911,9 +2911,9 @@ df_insns_modify (struct df *df, basic_block bb, rtx first_insn, rtx last_insn)
/* A non-const call should not have slipped through the net. If
it does, we need to create a new basic block. Ouch. The
same applies for a label. */
if ((GET_CODE (insn) == CALL_INSN
if ((CALL_P (insn)
&& ! CONST_OR_PURE_CALL_P (insn))
|| GET_CODE (insn) == CODE_LABEL)
|| LABEL_P (insn))
abort ();
uid = INSN_UID (insn);

--- a/gcc/dwarf2out.c
+++ b/gcc/dwarf2out.c

@@ -1066,7 +1066,7 @@ dwarf2out_stack_adjust (rtx insn)
if (prologue_epilogue_contains (insn) || sibcall_epilogue_contains (insn))
return;
if (!flag_asynchronous_unwind_tables && GET_CODE (insn) == CALL_INSN)
if (!flag_asynchronous_unwind_tables && CALL_P (insn))
{
/* Extract the size of the args from the CALL rtx itself. */
insn = PATTERN (insn);
@@ -1086,7 +1086,7 @@ dwarf2out_stack_adjust (rtx insn)
else if (!flag_asynchronous_unwind_tables && cfa.reg != STACK_POINTER_REGNUM)
return;
if (GET_CODE (insn) == BARRIER)
if (BARRIER_P (insn))
{
/* When we see a BARRIER, we know to reset args_size to 0. Usually
the compiler will have already emitted a stack adjustment, but
@@ -1814,7 +1814,7 @@ dwarf2out_frame_debug (rtx insn)
return;
}
if (GET_CODE (insn) != INSN || clobbers_queued_reg_save (insn))
if (!NONJUMP_INSN_P (insn) || clobbers_queued_reg_save (insn))
flush_queued_reg_saves ();
if (! RTX_FRAME_RELATED_P (insn))
@@ -11592,8 +11592,8 @@ gen_label_die (tree decl, dw_die_ref context_die)
eliminated because of various optimizations. We still emit them
here so that it is possible to put breakpoints on them. */
if (insn
&& (GET_CODE (insn) == CODE_LABEL
|| ((GET_CODE (insn) == NOTE
&& (LABEL_P (insn)
|| ((NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL))))
{
/* When optimization is enabled (via -O) some parts of the compiler
@@ -13119,7 +13119,7 @@ dwarf2out_var_location (rtx loc_note)
last time. */
if (last_insn != NULL_RTX
&& last_insn == prev_insn
&& GET_CODE (prev_insn) == NOTE
&& NOTE_P (prev_insn)
&& NOTE_LINE_NUMBER (prev_insn) == NOTE_INSN_VAR_LOCATION)
{
newloc->label = last_label;

--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c

@@ -2863,7 +2863,7 @@ get_first_nonnote_insn (void)
while (insn)
{
insn = next_insn (insn);
if (insn == 0 || GET_CODE (insn) != NOTE)
if (insn == 0 || !NOTE_P (insn))
break;
}
@@ -2881,7 +2881,7 @@ get_last_nonnote_insn (void)
while (insn)
{
insn = previous_insn (insn);
if (insn == 0 || GET_CODE (insn) != NOTE)
if (insn == 0 || !NOTE_P (insn))
break;
}
@@ -2932,7 +2932,7 @@ next_insn (rtx insn)
if (insn)
{
insn = NEXT_INSN (insn);
if (insn && GET_CODE (insn) == INSN
if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
}
@@ -2949,7 +2949,7 @@ previous_insn (rtx insn)
if (insn)
{
insn = PREV_INSN (insn);
if (insn && GET_CODE (insn) == INSN
if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
}
@@ -2966,7 +2966,7 @@ next_nonnote_insn (rtx insn)
while (insn)
{
insn = NEXT_INSN (insn);
if (insn == 0 || GET_CODE (insn) != NOTE)
if (insn == 0 || !NOTE_P (insn))
break;
}
@@ -2982,7 +2982,7 @@ prev_nonnote_insn (rtx insn)
while (insn)
{
insn = PREV_INSN (insn);
if (insn == 0 || GET_CODE (insn) != NOTE)
if (insn == 0 || !NOTE_P (insn))
break;
}
@@ -3032,7 +3032,7 @@ last_call_insn (void)
rtx insn;
for (insn = get_last_insn ();
insn && GET_CODE (insn) != CALL_INSN;
insn && !CALL_P (insn);
insn = PREV_INSN (insn))
;
@@ -3046,8 +3046,8 @@
int
active_insn_p (rtx insn)
{
return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
|| (GET_CODE (insn) == INSN
return (CALL_P (insn) || JUMP_P (insn)
|| (NONJUMP_INSN_P (insn)
&& (! reload_completed
|| (GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER))));
@@ -3091,7 +3091,7 @@ next_label (rtx insn)
while (insn)
{
insn = NEXT_INSN (insn);
if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
if (insn == 0 || LABEL_P (insn))
break;
}
@@ -3106,7 +3106,7 @@ prev_label (rtx insn)
while (insn)
{
insn = PREV_INSN (insn);
if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
if (insn == 0 || LABEL_P (insn))
break;
}
@@ -3137,7 +3137,7 @@ link_cc0_insns (rtx insn)
{
rtx user = next_nonnote_insn (insn);
if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
user = XVECEXP (PATTERN (user), 0, 0);
REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
@@ -3163,7 +3163,7 @@ next_cc0_user (rtx insn)
return XEXP (note, 0);
insn = next_nonnote_insn (insn);
if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
@@ -3248,7 +3248,7 @@ try_split (rtx pat, rtx trial, int last)
/* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
We may need to handle this specially. */
if (after && GET_CODE (after) == BARRIER)
if (after && BARRIER_P (after))
{
has_barrier = 1;
after = NEXT_INSN (after);
@@ -3273,7 +3273,7 @@ try_split (rtx pat, rtx trial, int last)
/* Mark labels. */
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
{
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
{
mark_jump_label (PATTERN (insn), insn, 0);
njumps++;
@@ -3297,10 +3297,10 @@ try_split (rtx pat, rtx trial, int last)
/* If we are splitting a CALL_INSN, look for the CALL_INSN
in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
if (GET_CODE (trial) == CALL_INSN)
if (CALL_P (trial))
{
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
while (*p)
@@ -3319,7 +3319,7 @@ try_split (rtx pat, rtx trial, int last)
insn = insn_last;
while (insn != NULL_RTX)
{
if (GET_CODE (insn) == CALL_INSN
if (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn))))
REG_NOTES (insn)
@@ -3336,7 +3336,7 @@ try_split (rtx pat, rtx trial, int last)
insn = insn_last;
while (insn != NULL_RTX)
{
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
XEXP (note, 0),
@@ -3349,7 +3349,7 @@ try_split (rtx pat, rtx trial, int last)
insn = insn_last;
while (insn != NULL_RTX)
{
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
XEXP (note, 0),
@@ -3365,12 +3365,12 @@ try_split (rtx pat, rtx trial, int last)
/* If there are LABELS inside the split insns increment the
usage count so we don't delete the label. */
if (GET_CODE (trial) == INSN)
if (NONJUMP_INSN_P (trial))
{
insn = insn_last;
while (insn != NULL_RTX)
{
if (GET_CODE (insn) == INSN)
if (NONJUMP_INSN_P (insn))
mark_label_nuses (PATTERN (insn));
insn = PREV_INSN (insn);
@@ -3512,7 +3512,7 @@ add_insn_after (rtx insn, rtx after)
if (next)
{
PREV_INSN (next) = insn;
if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
}
else if (last_insn == after)
@@ -3532,8 +3532,8 @@ add_insn_after (rtx insn, rtx after)
abort ();
}
if (GET_CODE (after) != BARRIER
&& GET_CODE (insn) != BARRIER
if (!BARRIER_P (after)
&& !BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (after)))
{
set_block_for_insn (insn, bb);
@@ -3543,14 +3543,14 @@ add_insn_after (rtx insn, rtx after)
either NOTE or LABEL. */
if (BB_END (bb) == after
/* Avoid clobbering of structure when creating new BB. */
&& GET_CODE (insn) != BARRIER
&& (GET_CODE (insn) != NOTE
&& !BARRIER_P (insn)
&& (!NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
BB_END (bb) = insn;
}
NEXT_INSN (after) = insn;
if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
{
rtx sequence = PATTERN (after);
NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
@@ -3577,7 +3577,7 @@ add_insn_before (rtx insn, rtx before)
if (prev)
{
NEXT_INSN (prev) = insn;
if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
{
rtx sequence = PATTERN (prev);
NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
@@ -3600,8 +3600,8 @@ add_insn_before (rtx insn, rtx before)
abort ();
}
if (GET_CODE (before) != BARRIER
&& GET_CODE (insn) != BARRIER
if (!BARRIER_P (before)
&& !BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (before)))
{
set_block_for_insn (insn, bb);
@@ -3611,14 +3611,14 @@ add_insn_before (rtx insn, rtx before)
either NOTE or LABEL. */
if (BB_HEAD (bb) == insn
/* Avoid clobbering of structure when creating new BB. */
&& GET_CODE (insn) != BARRIER
&& (GET_CODE (insn) != NOTE
&& !BARRIER_P (insn)
&& (!NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
abort ();
}
PREV_INSN (before) = insn;
if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
@@ -3634,7 +3634,7 @@ remove_insn (rtx insn)
if (prev)
{
NEXT_INSN (prev) = next;
if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
{
rtx sequence = PATTERN (prev);
NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
@@ -3660,7 +3660,7 @@ remove_insn (rtx insn)
if (next)
{
PREV_INSN (next) = prev;
if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
}
else if (last_insn == insn)
@@ -3679,7 +3679,7 @@ remove_insn (rtx insn)
if (stack == 0)
abort ();
}
if (GET_CODE (insn) != BARRIER
if (!BARRIER_P (insn)
&& (bb = BLOCK_FOR_INSN (insn)))
{
if (INSN_P (insn))
@ -3688,7 +3688,7 @@ remove_insn (rtx insn)
{
/* Never ever delete the basic block note without deleting whole
basic block. */
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
abort ();
BB_HEAD (bb) = next;
}
@@ -3702,7 +3702,7 @@ remove_insn (rtx insn)
void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
if (! call_insn || !CALL_P (call_insn))
abort ();
/* Put the register usage information on the CALL. If there is already
@@ -3777,13 +3777,13 @@ reorder_insns (rtx from, rtx to, rtx after)
reorder_insns_nobb (from, to, after);
if (GET_CODE (after) != BARRIER
if (!BARRIER_P (after)
&& (bb = BLOCK_FOR_INSN (after)))
{
rtx x;
bb->flags |= BB_DIRTY;
if (GET_CODE (from) != BARRIER
if (!BARRIER_P (from)
&& (bb2 = BLOCK_FOR_INSN (from)))
{
if (BB_END (bb2) == to)
@@ -3808,7 +3808,7 @@ find_line_note (rtx insn)
return 0;
for (; insn; insn = PREV_INSN (insn))
if (GET_CODE (insn) == NOTE
if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) >= 0)
break;
@@ -3834,7 +3834,7 @@ remove_unnecessary_notes (void)
next = NEXT_INSN (insn);
/* We're only interested in notes. */
if (GET_CODE (insn) != NOTE)
if (!NOTE_P (insn))
continue;
switch (NOTE_LINE_NUMBER (insn))
@@ -3894,7 +3894,7 @@ remove_unnecessary_notes (void)
break;
/* We're only interested in NOTEs. */
if (GET_CODE (tmp) != NOTE)
if (!NOTE_P (tmp))
continue;
if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
@@ -4153,14 +4153,14 @@ emit_insn_after_1 (rtx first, rtx after)
rtx after_after;
basic_block bb;
if (GET_CODE (after) != BARRIER
if (!BARRIER_P (after)
&& (bb = BLOCK_FOR_INSN (after)))
{
bb->flags |= BB_DIRTY;
for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
if (GET_CODE (last) != BARRIER)
if (!BARRIER_P (last))
set_block_for_insn (last, bb);
if (GET_CODE (last) != BARRIER)
if (!BARRIER_P (last))
set_block_for_insn (last, bb);
if (BB_END (bb) == after)
BB_END (bb) = last;
@@ -4763,7 +4763,7 @@ set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
enum rtx_code
classify_insn (rtx x)
{
if (GET_CODE (x) == CODE_LABEL)
if (LABEL_P (x))
return CODE_LABEL;
if (GET_CODE (x) == CALL)
return CALL_INSN;

--- a/gcc/except.c
+++ b/gcc/except.c

@@ -887,7 +887,7 @@ convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
for (insn = *pinsns; insn ; insn = next)
{
next = NEXT_INSN (insn);
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
{
int kind = NOTE_LINE_NUMBER (insn);
if (kind == NOTE_INSN_EH_REGION_BEG
@@ -928,7 +928,7 @@ convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
/* Calls can always potentially throw exceptions, unless
they have a REG_EH_REGION note with a value of 0 or less.
Which should be the only possible kind so far. */
&& (GET_CODE (insn) == CALL_INSN
&& (CALL_P (insn)
/* If we wanted exceptions for non-call insns, then
any may_trap_p instruction could throw. */
|| (flag_non_call_exceptions
@@ -1464,7 +1464,7 @@ emit_to_new_bb_before (rtx seq, rtx insn)
if (e->flags & EDGE_FALLTHRU)
force_nonfallthru (e);
last = emit_insn_before (seq, insn);
if (GET_CODE (last) == BARRIER)
if (BARRIER_P (last))
last = PREV_INSN (last);
bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
update_bb_for_insn (bb);
@@ -1655,7 +1655,7 @@ connect_post_landing_pads (void)
end_sequence ();
barrier = emit_insn_before (seq, region->resume);
/* Avoid duplicate barrier. */
if (GET_CODE (barrier) != BARRIER)
if (!BARRIER_P (barrier))
abort ();
delete_insn (barrier);
delete_insn (region->resume);
@@ -1880,7 +1880,7 @@ sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
rtx note, before, p;
/* Reset value tracking at extended basic block boundaries. */
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
last_call_site = -2;
if (! INSN_P (insn))
@@ -1892,7 +1892,7 @@ sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
/* Calls (and trapping insns) without notes are outside any
exception handling region in this function. Mark them as
no action. */
if (GET_CODE (insn) == CALL_INSN
if (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn))))
this_call_site = -1;
@@ -1914,7 +1914,7 @@ sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
/* Don't separate a call from its argument loads. */
before = insn;
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
before = find_first_parameter_load (insn, NULL_RTX);
start_sequence ();
@@ -1991,7 +1991,7 @@ sjlj_emit_function_enter (rtx dispatch_label)
can_throw_internal instructions. */
for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
if (GET_CODE (fn_begin) == NOTE
if (NOTE_P (fn_begin)
&& (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
|| NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
break;
@@ -2001,7 +2001,7 @@ sjlj_emit_function_enter (rtx dispatch_label)
{
rtx last = BB_END (ENTRY_BLOCK_PTR->succ->dest);
for (; ; fn_begin = NEXT_INSN (fn_begin))
if ((GET_CODE (fn_begin) == NOTE
if ((NOTE_P (fn_begin)
&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
|| fn_begin == last)
break;
@@ -2056,7 +2056,7 @@ sjlj_emit_function_exit (void)
else
{
insn = cfun->eh->sjlj_exit_after;
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
insn = NEXT_INSN (insn);
emit_insn_after (seq, insn);
}
@@ -2720,7 +2720,7 @@ reachable_handlers (rtx insn)
rtx handlers = NULL;
int region_number;
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == RESX)
{
region_number = XINT (PATTERN (insn), 0);
@@ -2784,12 +2784,12 @@ can_throw_internal (rtx insn)
if (! INSN_P (insn))
return false;
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == RESX
&& XINT (PATTERN (insn), 0) > 0)
return can_throw_internal_1 (XINT (PATTERN (insn), 0));
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
@@ -2836,7 +2836,7 @@ can_throw_external (rtx insn)
if (! INSN_P (insn))
return false;
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
@@ -2848,7 +2848,7 @@ can_throw_external (rtx insn)
assume it might throw. Given that the front end and middle
ends mark known NOTHROW functions, this isn't so wildly
inaccurate. */
return (GET_CODE (insn) == CALL_INSN
return (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn))));
}
@@ -2883,7 +2883,7 @@ set_nothrow_function_flags (void)
{
TREE_NOTHROW (current_function_decl) = 0;
if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
return;
@@ -2896,7 +2896,7 @@ set_nothrow_function_flags (void)
{
TREE_NOTHROW (current_function_decl) = 0;
if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
return;
@@ -3316,14 +3316,14 @@ convert_to_eh_region_ranges (void)
rtx this_landing_pad;
insn = iter;
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
if (!note)
{
if (! (GET_CODE (insn) == CALL_INSN
if (! (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn)))))
continue;

--- a/gcc/explow.c
+++ b/gcc/explow.c

@@ -1118,7 +1118,7 @@ optimize_save_area_alloca (void)
{
rtx note;
if (GET_CODE (insn) != INSN)
if (!NONJUMP_INSN_P (insn))
continue;
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))

--- a/gcc/expr.c
+++ b/gcc/expr.c

@@ -6405,7 +6405,7 @@ expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
may_trap_p instruction may throw. */
&& GET_CODE (PATTERN (insn)) != CLOBBER
&& GET_CODE (PATTERN (insn)) != USE
&& (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
&& (CALL_P (insn) || may_trap_p (PATTERN (insn))))
{
REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
REG_NOTES (insn));

--- a/gcc/final.c
+++ b/gcc/final.c

@@ -625,7 +625,7 @@ insn_current_reference_address (rtx branch)
seq = NEXT_INSN (PREV_INSN (branch));
seq_uid = INSN_UID (seq);
if (GET_CODE (branch) != JUMP_INSN)
if (!JUMP_P (branch))
/* This can happen for example on the PA; the objective is to know the
offset to address something in front of the start of the function.
Thus, we can treat it like a backward branch.
@@ -678,7 +678,7 @@ compute_alignments (void)
int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
edge e;
if (GET_CODE (label) != CODE_LABEL
if (!LABEL_P (label)
|| probably_never_executed_bb_p (bb))
continue;
max_log = LABEL_ALIGN (label);
@@ -817,7 +817,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
is separated by the former loop start insn from the
NOTE_INSN_LOOP_BEG. */
}
else if (GET_CODE (insn) == CODE_LABEL)
else if (LABEL_P (insn))
{
rtx next;
@@ -839,7 +839,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
/* ADDR_VECs only take room if read-only data goes into the text
section. */
if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
if (next && GET_CODE (next) == JUMP_INSN)
if (next && JUMP_P (next))
{
rtx nextbody = PATTERN (next);
if (GET_CODE (nextbody) == ADDR_VEC
@@ -858,13 +858,13 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
max_log = 0;
max_skip = 0;
}
else if (GET_CODE (insn) == BARRIER)
else if (BARRIER_P (insn))
{
rtx label;
for (label = insn; label && ! INSN_P (label);
label = NEXT_INSN (label))
if (GET_CODE (label) == CODE_LABEL)
if (LABEL_P (label))
{
log = LABEL_ALIGN_AFTER_BARRIER (insn);
if (max_log < log)
@@ -901,7 +901,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
{
int uid = INSN_UID (seq);
int log;
log = (GET_CODE (seq) == CODE_LABEL ? LABEL_TO_ALIGNMENT (seq) : 0);
log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
uid_align[uid] = align_tab[0];
if (log)
{
@@ -928,7 +928,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
int min_align;
addr_diff_vec_flags flags;
if (GET_CODE (insn) != JUMP_INSN
if (!JUMP_P (insn)
|| GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
continue;
pat = PATTERN (insn);
@@ -977,7 +977,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
insn_lengths[uid] = 0;
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
{
int log = LABEL_TO_ALIGNMENT (insn);
if (log)
@@ -990,8 +990,8 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
if (GET_CODE (insn) == NOTE || GET_CODE (insn) == BARRIER
|| GET_CODE (insn) == CODE_LABEL)
if (NOTE_P (insn) || BARRIER_P (insn)
|| LABEL_P (insn))
continue;
if (INSN_DELETED_P (insn))
continue;
@@ -1082,7 +1082,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
uid = INSN_UID (insn);
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
{
int log = LABEL_TO_ALIGNMENT (insn);
if (log > insn_current_align)
@@ -1107,7 +1107,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
INSN_ADDRESSES (uid) = insn_current_address;
#ifdef CASE_VECTOR_SHORTEN_MODE
if (optimize && GET_CODE (insn) == JUMP_INSN
if (optimize && JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
{
rtx body = PATTERN (insn);
@@ -1217,7 +1217,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
if (! (varying_length[uid]))
{
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
{
int i;
@@ -1239,7 +1239,7 @@ shorten_branches (rtx first ATTRIBUTE_UNUSED)
continue;
}
if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
{
int i;
@@ -1517,7 +1517,7 @@ final (rtx first, FILE *file, int optimize, int prescan)
{
rtx last = 0;
for (insn = first; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
if (last != 0
#ifdef USE_MAPPED_LOCATION
@@ -1543,7 +1543,7 @@ final (rtx first, FILE *file, int optimize, int prescan)
#ifdef HAVE_cc0
/* If CC tracking across branches is enabled, record the insn which
jumps to each branch only reached from one place. */
if (optimize && GET_CODE (insn) == JUMP_INSN)
if (optimize && JUMP_P (insn))
{
rtx lab = JUMP_LABEL (insn);
if (lab && LABEL_NUSES (lab) == 1)
@@ -1566,7 +1566,7 @@ final (rtx first, FILE *file, int optimize, int prescan)
{
/* This can be triggered by bugs elsewhere in the compiler if
new insns are created after init_insn_lengths is called. */
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
insn_current_address = -1;
else
abort ();
@@ -1641,10 +1641,10 @@ scan_ahead_for_unlikely_executed_note (rtx insn)
for (temp = insn; temp; temp = NEXT_INSN (temp))
{
if (GET_CODE (temp) == NOTE
if (NOTE_P (temp)
&& NOTE_LINE_NUMBER (temp) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
return true;
if (GET_CODE (temp) == NOTE
if (NOTE_P (temp)
&& NOTE_LINE_NUMBER (temp) == NOTE_INSN_BASIC_BLOCK)
{
bb_note_count++;
@@ -1902,10 +1902,10 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
insn, and that branch is the only way to reach this label,
set the condition codes based on the branch and its
predecessor. */
if (barrier && GET_CODE (barrier) == BARRIER
&& jump && GET_CODE (jump) == JUMP_INSN
if (barrier && BARRIER_P (barrier)
&& jump && JUMP_P (jump)
&& (prev = prev_nonnote_insn (jump))
&& GET_CODE (prev) == INSN)
&& NONJUMP_INSN_P (prev))
{
NOTICE_UPDATE_CC (PATTERN (prev), prev);
NOTICE_UPDATE_CC (PATTERN (jump), jump);
@@ -1925,7 +1925,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
if (flag_reorder_blocks_and_partition)
{
rtx tmp_table, tmp_label;
if (GET_CODE (insn) == CODE_LABEL
if (LABEL_P (insn)
&& tablejump_p (NEXT_INSN (insn), &tmp_label, &tmp_table))
{
/* Do nothing; Do NOT change the current section. */
@@ -1945,7 +1945,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
app_on = 0;
}
if (NEXT_INSN (insn) != 0
&& GET_CODE (NEXT_INSN (insn)) == JUMP_INSN)
&& JUMP_P (NEXT_INSN (insn)))
{
rtx nextbody = PATTERN (NEXT_INSN (insn));
@@ -2214,7 +2214,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
called function. Hence we don't preserve any CC-setting
actions in these insns and the CC must be marked as being
clobbered by the function. */
if (GET_CODE (XVECEXP (body, 0, 0)) == CALL_INSN)
if (CALL_P (XVECEXP (body, 0, 0)))
{
CC_STATUS_INIT;
}
@@ -2279,7 +2279,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
will cause an improper number of delay insns to be written. */
if (final_sequence == 0
&& prescan >= 0
&& GET_CODE (insn) == INSN && GET_CODE (body) == SET
&& NONJUMP_INSN_P (insn) && GET_CODE (body) == SET
&& REG_P (SET_SRC (body))
&& REG_P (SET_DEST (body))
&& REGNO (SET_SRC (body)) == REGNO (SET_DEST (body)))
@@ -2293,7 +2293,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
do straightforwardly if the cc's were set up normally. */
if (cc_status.flags != 0
&& GET_CODE (insn) == JUMP_INSN
&& JUMP_P (insn)
&& GET_CODE (body) == SET
&& SET_DEST (body) == pc_rtx
&& GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
@@ -2342,7 +2342,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
{
rtx cond_rtx, then_rtx, else_rtx;
if (GET_CODE (insn) != JUMP_INSN
if (!JUMP_P (insn)
&& GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
{
cond_rtx = XEXP (SET_SRC (set), 0);
@@ -2470,7 +2470,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
current_output_insn = debug_insn = insn;
#if defined (DWARF2_UNWIND_INFO)
if (GET_CODE (insn) == CALL_INSN && dwarf2out_do_frame ())
if (CALL_P (insn) && dwarf2out_do_frame ())
dwarf2out_frame_debug (insn);
#endif
@@ -2495,7 +2495,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
prev != last_ignored_compare;
prev = PREV_INSN (prev))
{
if (GET_CODE (prev) == NOTE)
if (NOTE_P (prev))
delete_insn (prev); /* Use delete_note. */
}
@@ -2536,7 +2536,7 @@ final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
the unwind info. We've already done this for delay slots
and call instructions. */
#if defined (DWARF2_UNWIND_INFO)
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
#if !defined (HAVE_prologue)
&& !ACCUMULATE_OUTGOING_ARGS
#endif
@@ -3196,8 +3196,8 @@ output_asm_label (rtx x)
if (GET_CODE (x) == LABEL_REF)
x = XEXP (x, 0);
if (GET_CODE (x) == CODE_LABEL
|| (GET_CODE (x) == NOTE
if (LABEL_P (x)
|| (NOTE_P (x)
&& NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
else
@@ -3690,12 +3690,12 @@ leaf_function_p (void)
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) == CALL_INSN
if (CALL_P (insn)
&& ! SIBLING_CALL_P (insn))
return 0;
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE
&& GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
&& CALL_P (XVECEXP (PATTERN (insn), 0, 0))
&& ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
return 0;
}
@@ -3705,12 +3705,12 @@ leaf_function_p (void)
{
insn = XEXP (link, 0);
if (GET_CODE (insn) == CALL_INSN
if (CALL_P (insn)
&& ! SIBLING_CALL_P (insn))
return 0;
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE
&& GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
&& CALL_P (XVECEXP (PATTERN (insn), 0, 0))
&& ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
return 0;
}

--- a/gcc/flow.c
+++ b/gcc/flow.c

@@ -343,7 +343,7 @@ first_insn_after_basic_block_note (basic_block block)
if (insn == NULL_RTX)
return NULL_RTX;
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
insn = NEXT_INSN (insn);
if (!NOTE_INSN_BASIC_BLOCK_P (insn))
abort ();
@@ -832,9 +832,9 @@ delete_dead_jumptables (void)
for (insn = get_insns (); insn; insn = next)
{
next = NEXT_INSN (insn);
if (GET_CODE (insn) == CODE_LABEL
if (LABEL_P (insn)
&& LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn)
&& GET_CODE (next) == JUMP_INSN
&& JUMP_P (next)
&& (GET_CODE (PATTERN (next)) == ADDR_VEC
|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
{
@@ -1474,7 +1474,7 @@ propagate_block_delete_insn (rtx insn)
real good way to fix up the reference to the deleted label
when the label is deleted, so we just allow it here. */
if (inote && GET_CODE (inote) == CODE_LABEL)
if (inote && LABEL_P (inote))
{
rtx label = XEXP (inote, 0);
rtx next;
@@ -1484,7 +1484,7 @@ propagate_block_delete_insn (rtx insn)
jump following it, but not the label itself. */
if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
&& (next = next_nonnote_insn (label)) != NULL
&& GET_CODE (next) == JUMP_INSN
&& JUMP_P (next)
&& (GET_CODE (PATTERN (next)) == ADDR_VEC
|| GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
{
@@ -1674,7 +1674,7 @@ propagate_one_insn (struct propagate_block_info *pbi, rtx insn)
in a register clobbered by calls. Find all regs now live and
record this for them. */
if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
if (CALL_P (insn) && (flags & PROP_REG_INFO))
EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
{ REG_N_CALLS_CROSSED (i)++; });
@@ -1682,7 +1682,7 @@ propagate_one_insn (struct propagate_block_info *pbi, rtx insn)
would have killed the values if they hadn't been deleted. */
mark_set_regs (pbi, PATTERN (insn), insn);
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
regset live_at_end;
bool sibcall_p;
@@ -1752,7 +1752,7 @@ propagate_one_insn (struct propagate_block_info *pbi, rtx insn)
prev = PREV_INSN (insn);
#endif
if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
if (! insn_is_dead && CALL_P (insn))
{
int i;
rtx note, cond;
@@ -1824,7 +1824,7 @@ init_propagate_block_info (basic_block bb, regset live, regset local_set,
/* If this block ends in a conditional branch, for each register
live from one side of the branch and not the other, record the
register as conditionally dead. */
if (GET_CODE (BB_END (bb)) == JUMP_INSN
if (JUMP_P (BB_END (bb))
&& any_condjump_p (BB_END (bb)))
{
regset_head diff_head;
@@ -1932,7 +1932,7 @@ init_propagate_block_info (basic_block bb, regset live, regset local_set,
{
rtx insn, set;
for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& (set = single_set (insn))
&& MEM_P (SET_DEST (set)))
{
@@ -2026,7 +2026,7 @@ propagate_block (basic_block bb, regset live, regset local_set,
/* If this is a call to `setjmp' et al, warn if any
non-volatile datum is live. */
if ((flags & PROP_REG_INFO)
&& GET_CODE (insn) == CALL_INSN
&& CALL_P (insn)
&& find_reg_note (insn, REG_SETJMP, NULL))
IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
@@ -2272,7 +2272,7 @@ libcall_dead_p (struct propagate_block_info *pbi, rtx note, rtx insn)
int i;
/* Find the call insn. */
while (call != insn && GET_CODE (call) != CALL_INSN)
while (call != insn && !CALL_P (call))
call = NEXT_INSN (call);
/* If there is none, do nothing special,
@@ -2745,8 +2745,8 @@ mark_set_1 (struct propagate_block_info *pbi, enum rtx_code code, rtx reg, rtx c
if (y && (BLOCK_NUM (y) == blocknum)
&& (regno_first >= FIRST_PSEUDO_REGISTER
|| (asm_noperands (PATTERN (y)) < 0
&& ! ((GET_CODE (insn) == CALL_INSN
|| GET_CODE (y) == CALL_INSN)
&& ! ((CALL_P (insn)
|| CALL_P (y))
&& global_regs[regno_first]))))
LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
}
@@ -3322,7 +3322,7 @@ attempt_auto_inc (struct propagate_block_info *pbi, rtx inc, rtx insn,
use of INCR_REG. If a use of INCR_REG was just placed in
the insn before INSN, make that the next use.
Otherwise, invalidate it. */
if (GET_CODE (PREV_INSN (insn)) == INSN
if (NONJUMP_INSN_P (PREV_INSN (insn))
&& GET_CODE (PATTERN (PREV_INSN (insn))) == SET
&& SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
pbi->reg_next_use[regno] = PREV_INSN (insn);
@@ -3345,7 +3345,7 @@ attempt_auto_inc (struct propagate_block_info *pbi, rtx inc, rtx insn,
/* If there are any calls between INSN and INCR, show
that REGNO now crosses them. */
for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
if (GET_CODE (temp) == CALL_INSN)
if (CALL_P (temp))
REG_N_CALLS_CROSSED (regno)++;
/* Invalidate alias info for Q since we just changed its value. */
@@ -3418,7 +3418,7 @@ find_auto_inc (struct propagate_block_info *pbi, rtx x, rtx insn)
int regno;
int size = GET_MODE_SIZE (GET_MODE (x));
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
return;
/* Here we detect use of an index register which might be good for
@@ -4049,7 +4049,7 @@ try_pre_increment (rtx insn, rtx reg, HOST_WIDE_INT amount)
because if the incremented register is spilled and must be reloaded
there would be no way to store the incremented value back in memory. */
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
return 0;
use = 0;


@ -3641,7 +3641,7 @@ reorder_blocks_1 (rtx insns, tree current_block, varray_type *p_block_stack)
for (insn = insns; insn; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
{
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
{
@ -4201,7 +4201,7 @@ expand_function_start (tree subr)
as opposed to parm setup. */
emit_note (NOTE_INSN_FUNCTION_BEG);
if (GET_CODE (get_last_insn ()) != NOTE)
if (!NOTE_P (get_last_insn ()))
emit_note (NOTE_INSN_DELETED);
parm_birth_insn = get_last_insn ();
@ -4339,7 +4339,7 @@ expand_function_end (void)
rtx insn, seq;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
start_sequence ();
probe_stack_range (STACK_CHECK_PROTECT,
@ -4624,7 +4624,7 @@ contains (rtx insn, varray_type vec)
{
int i, j;
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
{
int count = 0;
@ -5119,14 +5119,14 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
/* Verify that there are no active instructions in the last block. */
label = BB_END (last);
while (label && GET_CODE (label) != CODE_LABEL)
while (label && !LABEL_P (label))
{
if (active_insn_p (label))
break;
label = PREV_INSN (label);
}
if (BB_HEAD (last) == label && GET_CODE (label) == CODE_LABEL)
if (BB_HEAD (last) == label && LABEL_P (label))
{
rtx epilogue_line_note = NULL_RTX;
@ -5135,7 +5135,7 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
for (seq = get_last_insn ();
seq && ! active_insn_p (seq);
seq = PREV_INSN (seq))
if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
{
epilogue_line_note = seq;
break;
@ -5151,7 +5151,7 @@ thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
continue;
jump = BB_END (bb);
if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
continue;
/* If we have an unconditional jump, we can replace that
@ -5266,7 +5266,7 @@ epilogue_done:
rtx i;
rtx newinsn;
if (GET_CODE (insn) != CALL_INSN
if (!CALL_P (insn)
|| ! SIBLING_CALL_P (insn))
continue;
@ -5308,7 +5308,7 @@ epilogue_done:
for (insn = prologue_end; insn; insn = prev)
{
prev = PREV_INSN (insn);
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
/* Note that we cannot reorder the first insn in the
chain, since rest_of_compilation relies on that
@ -5323,7 +5323,7 @@ epilogue_done:
for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
insn != prologue_end && insn;
insn = PREV_INSN (insn))
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
break;
/* If we didn't find one, make a copy of the first line number
@ -5333,7 +5333,7 @@ epilogue_done:
for (insn = next_active_insn (prologue_end);
insn;
insn = PREV_INSN (insn))
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
emit_note_copy_after (insn, prologue_end);
break;
@ -5354,7 +5354,7 @@ epilogue_done:
for (insn = epilogue_end; insn; insn = next)
{
next = NEXT_INSN (insn);
if (GET_CODE (insn) == NOTE
if (NOTE_P (insn)
&& (NOTE_LINE_NUMBER (insn) > 0
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
@ -5383,7 +5383,7 @@ reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
reorg has run. */
for (insn = f; insn; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
{
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
note = insn;
@ -5403,13 +5403,13 @@ reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
if (note == 0)
{
for (note = last; (note = NEXT_INSN (note));)
if (GET_CODE (note) == NOTE
if (NOTE_P (note)
&& NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
break;
}
/* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
if (GET_CODE (last) == CODE_LABEL)
if (LABEL_P (last))
last = NEXT_INSN (last);
reorder_insns (note, note, last);
}
@ -5424,7 +5424,7 @@ reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
reorg has run. */
for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
{
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
{
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
note = insn;
@ -5444,7 +5444,7 @@ reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
if (note == 0)
{
for (note = insn; (note = PREV_INSN (note));)
if (GET_CODE (note) == NOTE
if (NOTE_P (note)
&& NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
break;
}


@ -3293,7 +3293,7 @@ constprop_register (rtx insn, rtx from, rtx to, int alter_jumps)
}
/* Handle normal insns next. */
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& try_replace_reg (from, to, insn))
return 1;
@ -4127,7 +4127,7 @@ bypass_conditional_jumps (void)
for (insn = BB_HEAD (bb);
insn != NULL && insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
if (GET_CODE (insn) == INSN)
if (NONJUMP_INSN_P (insn))
{
if (setcc)
break;
@ -4427,7 +4427,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
instructions in presence of non-call exceptions. */
if (JUMP_P (insn)
|| (GET_CODE (insn) == INSN
|| (NONJUMP_INSN_P (insn)
&& (bb->succ->succ_next || (bb->succ->flags & EDGE_ABNORMAL))))
{
#ifdef HAVE_cc0
@ -4436,7 +4436,7 @@ insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
/* It should always be the case that we can put these instructions
anywhere in the basic block when performing PRE optimizations.
Check this. */
if (GET_CODE (insn) == INSN && pre
if (NONJUMP_INSN_P (insn) && pre
&& !TEST_BIT (antloc[bb->index], expr->bitmap_index)
&& !TEST_BIT (transp[bb->index], expr->bitmap_index))
abort ();
@ -7375,7 +7375,7 @@ gcse_after_reload (void)
insn = NEXT_INSN (insn))
{
/* Is it a load - of the form (set (reg) (mem))? */
if (GET_CODE (insn) == INSN
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
&& REG_P (SET_DEST (PATTERN (insn)))
&& MEM_P (SET_SRC (PATTERN (insn))))


@ -536,7 +536,7 @@ gen_expand (rtx expand)
|| (GET_CODE (next) == PARALLEL
&& GET_CODE (XVECEXP (next, 0, 0)) == CALL))
printf (" emit_call_insn (");
else if (GET_CODE (next) == CODE_LABEL)
else if (LABEL_P (next))
printf (" emit_label (");
else if (GET_CODE (next) == MATCH_OPERAND
|| GET_CODE (next) == MATCH_DUP
@ -646,7 +646,7 @@ gen_split (rtx split)
|| (GET_CODE (next) == PARALLEL
&& GET_CODE (XVECEXP (next, 0, 0)) == CALL))
printf (" emit_call_insn (");
else if (GET_CODE (next) == CODE_LABEL)
else if (LABEL_P (next))
printf (" emit_label (");
else if (GET_CODE (next) == MATCH_OPERAND
|| GET_CODE (next) == MATCH_OPERATOR


@ -75,13 +75,13 @@ gen_peephole (rtx peep)
printf (" do { insn = NEXT_INSN (insn);\n");
printf (" if (insn == 0) goto L%d; }\n",
insn_code_number);
printf (" while (GET_CODE (insn) == NOTE\n");
printf ("\t || (GET_CODE (insn) == INSN\n");
printf (" while (NOTE_P (insn)\n");
printf ("\t || (NONJUMP_INSN_P (insn)\n");
printf ("\t && (GET_CODE (PATTERN (insn)) == USE\n");
printf ("\t\t || GET_CODE (PATTERN (insn)) == CLOBBER)));\n");
printf (" if (GET_CODE (insn) == CODE_LABEL\n\
|| GET_CODE (insn) == BARRIER)\n goto L%d;\n",
printf (" if (LABEL_P (insn)\n\
|| BARRIER_P (insn))\n goto L%d;\n",
insn_code_number);
}
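Read back from the printf calls above, the matcher code that gen_peephole now emits looks roughly like this; L9999 stands in for the real insn_code_number, which is only known when the generator runs.

/* Approximate shape of the emitted matcher; L9999 is illustrative.  */
do { insn = NEXT_INSN (insn);
     if (insn == 0) goto L9999; }
while (NOTE_P (insn)
       || (NONJUMP_INSN_P (insn)
           && (GET_CODE (PATTERN (insn)) == USE
               || GET_CODE (PATTERN (insn)) == CLOBBER)));
if (LABEL_P (insn)
    || BARRIER_P (insn))
  goto L9999;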
@ -391,7 +391,7 @@ from the machine description file `md'. */\n\n");
/* Early out: no peepholes for insns followed by barriers. */
printf (" if (NEXT_INSN (ins1)\n");
printf (" && GET_CODE (NEXT_INSN (ins1)) == BARRIER)\n");
printf (" && BARRIER_P (NEXT_INSN (ins1)))\n");
printf (" return 0;\n\n");
/* Read the machine description. */


@ -1823,7 +1823,7 @@ build_insn_chain (rtx first)
});
}
if (GET_CODE (first) != NOTE && GET_CODE (first) != BARRIER)
if (!NOTE_P (first) && !BARRIER_P (first))
{
c = new_insn_chain ();
c->prev = prev;
@ -1886,7 +1886,7 @@ build_insn_chain (rtx first)
&& ! ((GET_CODE (PATTERN (first)) == ADDR_VEC
|| GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
&& prev_real_insn (first) != 0
&& GET_CODE (prev_real_insn (first)) == JUMP_INSN))
&& JUMP_P (prev_real_insn (first))))
abort ();
break;
}


@ -128,11 +128,11 @@ edge: { sourcename: \"%s.0\" targetname: \"%s.%d\" }\n",
fprintf (fp, "node: {\n title: \"%s.%d\"\n color: %s\n \
label: \"%s %d\n",
current_function_name (), XINT (tmp_rtx, 0),
GET_CODE (tmp_rtx) == NOTE ? "lightgrey"
: GET_CODE (tmp_rtx) == INSN ? "green"
: GET_CODE (tmp_rtx) == JUMP_INSN ? "darkgreen"
: GET_CODE (tmp_rtx) == CALL_INSN ? "darkgreen"
: GET_CODE (tmp_rtx) == CODE_LABEL ? "\
NOTE_P (tmp_rtx) ? "lightgrey"
: NONJUMP_INSN_P (tmp_rtx) ? "green"
: JUMP_P (tmp_rtx) ? "darkgreen"
: CALL_P (tmp_rtx) ? "darkgreen"
: LABEL_P (tmp_rtx) ? "\
darkgrey\n shape: ellipse" : "white",
GET_RTX_NAME (GET_CODE (tmp_rtx)), XINT (tmp_rtx, 0));
break;
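Factored out for readability, the color choice above amounts to the following hypothetical helper (not part of the patch). Jump and call insns deliberately share a color, and the LABEL_P case additionally gets "shape: ellipse" in the format string.

/* Hypothetical helper, not in the patch: the VCG node color picked
   above.  LABEL_P nodes also get "shape: ellipse" in the caller.  */
static const char *
vcg_node_color (rtx x)
{
  return NOTE_P (x) ? "lightgrey"
         : NONJUMP_INSN_P (x) ? "green"
         : JUMP_P (x) ? "darkgreen"
         : CALL_P (x) ? "darkgreen"
         : LABEL_P (x) ? "darkgrey" : "white";
}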
@ -141,7 +141,7 @@ darkgrey\n shape: ellipse" : "white",
}
/* Print the RTL. */
if (GET_CODE (tmp_rtx) == NOTE)
if (NOTE_P (tmp_rtx))
{
const char *name = "";
if (NOTE_LINE_NUMBER (tmp_rtx) < 0)
@ -287,9 +287,9 @@ print_rtl_graph_with_bb (const char *base, const char *suffix, rtx rtx_first)
if (start[INSN_UID (tmp_rtx)] < 0 && end[INSN_UID (tmp_rtx)] < 0)
{
if (GET_CODE (tmp_rtx) == BARRIER)
if (BARRIER_P (tmp_rtx))
continue;
if (GET_CODE (tmp_rtx) == NOTE
if (NOTE_P (tmp_rtx)
&& (1 || in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB))
continue;
}
@ -348,7 +348,7 @@ print_rtl_graph_with_bb (const char *base, const char *suffix, rtx rtx_first)
{
/* Don't print edges to barriers. */
if (next_insn == 0
|| GET_CODE (next_insn) != BARRIER)
|| !BARRIER_P (next_insn))
draw_edge (fp, XINT (tmp_rtx, 0),
next_insn ? INSN_UID (next_insn) : 999999, 0, 0);
else
@ -359,8 +359,8 @@ print_rtl_graph_with_bb (const char *base, const char *suffix, rtx rtx_first)
do
next_insn = NEXT_INSN (next_insn);
while (next_insn
&& (GET_CODE (next_insn) == NOTE
|| GET_CODE (next_insn) == BARRIER));
&& (NOTE_P (next_insn)
|| BARRIER_P (next_insn)));
draw_edge (fp, XINT (tmp_rtx, 0),
next_insn ? INSN_UID (next_insn) : 999999, 0, 3);


@ -1363,7 +1363,7 @@ unlink_other_notes (rtx insn, rtx tail)
{
rtx prev = PREV_INSN (insn);
while (insn != tail && GET_CODE (insn) == NOTE)
while (insn != tail && NOTE_P (insn))
{
rtx next = NEXT_INSN (insn);
/* Delete the note from its current position. */
@ -1399,7 +1399,7 @@ unlink_line_notes (rtx insn, rtx tail)
{
rtx prev = PREV_INSN (insn);
while (insn != tail && GET_CODE (insn) == NOTE)
while (insn != tail && NOTE_P (insn))
{
rtx next = NEXT_INSN (insn);
@ -1435,11 +1435,11 @@ get_block_head_tail (int b, rtx *headp, rtx *tailp)
basic block, or notes at the ends of basic blocks. */
while (head != tail)
{
if (GET_CODE (head) == NOTE)
if (NOTE_P (head))
head = NEXT_INSN (head);
else if (GET_CODE (tail) == NOTE)
else if (NOTE_P (tail))
tail = PREV_INSN (tail);
else if (GET_CODE (head) == CODE_LABEL)
else if (LABEL_P (head))
head = NEXT_INSN (head);
else
break;
@ -1456,7 +1456,7 @@ no_real_insns_p (rtx head, rtx tail)
{
while (head != NEXT_INSN (tail))
{
if (GET_CODE (head) != NOTE && GET_CODE (head) != CODE_LABEL)
if (!NOTE_P (head) && !LABEL_P (head))
return 0;
head = NEXT_INSN (head);
}
@ -1481,7 +1481,7 @@ rm_line_notes (rtx head, rtx tail)
/* Farm out notes, and maybe save them in NOTE_LIST.
This is needed to keep the debugger from
getting completely deranged. */
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
{
prev = insn;
insn = unlink_line_notes (insn, next_tail);
@ -1515,7 +1515,7 @@ save_line_notes (int b, rtx head, rtx tail)
next_tail = NEXT_INSN (tail);
for (insn = head; insn != next_tail; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
line = insn;
else
LINE_NOTE (insn) = line;
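The pattern NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0 recurs throughout this file: positive NOTE_LINE_NUMBER values are source line numbers, while negative values encode NOTE_INSN_* kinds. A hypothetical helper, not part of the patch, that names the idiom:

/* Hypothetical helper, not in the patch: nonzero iff INSN is a
   source-line note (positive NOTE_LINE_NUMBER), as opposed to a
   NOTE_INSN_* note (negative values).  */
static int
line_note_p (rtx insn)
{
  return NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0;
}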
@ -1542,19 +1542,19 @@ restore_line_notes (rtx head, rtx tail)
of this block. If it happens to be the same, then we don't want to
emit another line number note here. */
for (line = head; line; line = PREV_INSN (line))
if (GET_CODE (line) == NOTE && NOTE_LINE_NUMBER (line) > 0)
if (NOTE_P (line) && NOTE_LINE_NUMBER (line) > 0)
break;
/* Walk the insns keeping track of the current line-number and inserting
the line-number notes as needed. */
for (insn = head; insn != next_tail; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
line = insn;
/* This used to emit line number notes before every non-deleted note.
However, this confuses a debugger, because line notes not separated
by real instructions all end up at the same address. I can find no
use for line number notes before other notes, so none are emitted. */
else if (GET_CODE (insn) != NOTE
else if (!NOTE_P (insn)
&& INSN_UID (insn) < old_max_uid
&& (note = LINE_NOTE (insn)) != 0
&& note != line
@ -1606,7 +1606,7 @@ rm_redundant_line_notes (void)
are already present. The remainder tend to occur at basic
block boundaries. */
for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
{
/* If there are no active insns following, INSN is redundant. */
if (active_insn == 0)
@ -1632,9 +1632,9 @@ rm_redundant_line_notes (void)
line = insn;
active_insn = 0;
}
else if (!((GET_CODE (insn) == NOTE
else if (!((NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
|| (GET_CODE (insn) == INSN
|| (NONJUMP_INSN_P (insn)
&& (GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER))))
active_insn++;
@ -1664,7 +1664,7 @@ rm_other_notes (rtx head, rtx tail)
/* Farm out notes, and maybe save them in NOTE_LIST.
This is needed to keep the debugger from
getting completely deranged. */
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
{
prev = insn;
@ -1849,7 +1849,7 @@ ok_for_early_queue_removal (rtx insn)
rtx dep_link = 0;
int dep_cost;
if (GET_CODE (prev_insn) != NOTE)
if (!NOTE_P (prev_insn))
{
dep_link = find_insn_list (insn, INSN_DEPEND (prev_insn));
if (dep_link)
@ -2705,7 +2705,7 @@ set_priorities (rtx head, rtx tail)
sched_max_insns_priority = 0;
for (insn = tail; insn != prev_head; insn = PREV_INSN (insn))
{
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
continue;
n_insn++;
@ -2799,7 +2799,7 @@ sched_init (FILE *dump_file)
schedule differently depending on whether or not there are
line-number notes, i.e., depending on whether or not we're
generating debugging information. */
if (GET_CODE (insn) != NOTE)
if (!NOTE_P (insn))
++luid;
if (insn == BB_END (b))
@ -2825,7 +2825,7 @@ sched_init (FILE *dump_file)
FOR_EACH_BB (b)
{
for (line = BB_HEAD (b); line; line = PREV_INSN (line))
if (GET_CODE (line) == NOTE && NOTE_LINE_NUMBER (line) > 0)
if (NOTE_P (line) && NOTE_LINE_NUMBER (line) > 0)
{
line_note_head[b->index] = line;
break;
@ -2836,7 +2836,7 @@ sched_init (FILE *dump_file)
{
if (INSN_P (line))
break;
if (GET_CODE (line) == NOTE && NOTE_LINE_NUMBER (line) > 0)
if (NOTE_P (line) && NOTE_LINE_NUMBER (line) > 0)
line_note_head[b->index] = line;
}
}
@ -2853,10 +2853,10 @@ sched_init (FILE *dump_file)
insn = BB_END (EXIT_BLOCK_PTR->prev_bb);
if (NEXT_INSN (insn) == 0
|| (GET_CODE (insn) != NOTE
&& GET_CODE (insn) != CODE_LABEL
|| (!NOTE_P (insn)
&& !LABEL_P (insn)
/* Don't emit a NOTE if it would end up before a BARRIER. */
&& GET_CODE (NEXT_INSN (insn)) != BARRIER))
&& !BARRIER_P (NEXT_INSN (insn))))
{
emit_note_after (NOTE_INSN_DELETED, BB_END (EXIT_BLOCK_PTR->prev_bb));
/* Make insn appear outside BB. */


@ -150,7 +150,7 @@ count_bb_insns (basic_block bb)
while (1)
{
if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == INSN)
if (CALL_P (insn) || NONJUMP_INSN_P (insn))
count++;
if (insn == BB_END (bb))
@ -189,21 +189,21 @@ first_active_insn (basic_block bb)
{
rtx insn = BB_HEAD (bb);
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
{
if (insn == BB_END (bb))
return NULL_RTX;
insn = NEXT_INSN (insn);
}
while (GET_CODE (insn) == NOTE)
while (NOTE_P (insn))
{
if (insn == BB_END (bb))
return NULL_RTX;
insn = NEXT_INSN (insn);
}
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
return NULL_RTX;
return insn;
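Reassembled from the hunks above, the converted function reads as follows (a sketch for readability; ifcvt.c is authoritative):

/* first_active_insn after the conversion: skip the leading label and
   any notes, and return NULL_RTX if only a jump remains.  */
static rtx
first_active_insn (basic_block bb)
{
  rtx insn = BB_HEAD (bb);

  if (LABEL_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL_RTX;
      insn = NEXT_INSN (insn);
    }

  while (NOTE_P (insn))
    {
      if (insn == BB_END (bb))
        return NULL_RTX;
      insn = NEXT_INSN (insn);
    }

  if (JUMP_P (insn))
    return NULL_RTX;

  return insn;
}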
@ -217,10 +217,10 @@ last_active_insn (basic_block bb, int skip_use_p)
rtx insn = BB_END (bb);
rtx head = BB_HEAD (bb);
while (GET_CODE (insn) == NOTE
|| GET_CODE (insn) == JUMP_INSN
while (NOTE_P (insn)
|| JUMP_P (insn)
|| (skip_use_p
&& GET_CODE (insn) == INSN
&& NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == USE))
{
if (insn == head)
@ -228,7 +228,7 @@ last_active_insn (basic_block bb, int skip_use_p)
insn = PREV_INSN (insn);
}
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
return NULL_RTX;
return insn;
@ -271,10 +271,10 @@ cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
for (insn = start; ; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
goto insn_done;
if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
if (!NONJUMP_INSN_P (insn) && !CALL_P (insn))
abort ();
/* Remove USE insns that get in the way. */
@ -326,7 +326,7 @@ cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
validate_change (insn, &PATTERN (insn), pattern, 1);
if (GET_CODE (insn) == CALL_INSN && prob_val)
if (CALL_P (insn) && prob_val)
validate_change (insn, &REG_NOTES (insn),
alloc_EXPR_LIST (REG_BR_PROB, prob_val,
REG_NOTES (insn)), 1);
@ -726,7 +726,7 @@ end_ifcvt_sequence (struct noce_if_info *if_info)
As an exercise for the reader, build a general mechanism that
allows proper placement of required clobbers. */
for (insn = seq; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
|| recog_memoized (insn) == -1)
return NULL_RTX;
@ -1959,7 +1959,7 @@ noce_process_if_block (struct ce_if_block * ce_info)
COND_EARLIEST to JUMP. Make sure the relevant data is still
intact. */
if (! insn_b
|| GET_CODE (insn_b) != INSN
|| !NONJUMP_INSN_P (insn_b)
|| (set_b = single_set (insn_b)) == NULL_RTX
|| ! rtx_equal_p (x, SET_DEST (set_b))
|| reg_overlap_mentioned_p (x, SET_SRC (set_b))
@ -2231,7 +2231,7 @@ merge_if_block (struct ce_if_block * ce_info)
{
if (find_reg_note (last, REG_NORETURN, NULL))
;
else if (GET_CODE (last) == INSN
else if (NONJUMP_INSN_P (last)
&& GET_CODE (PATTERN (last)) == TRAP_IF
&& TRAP_CONDITION (PATTERN (last)) == const_true_rtx)
;
@ -2241,10 +2241,10 @@ merge_if_block (struct ce_if_block * ce_info)
/* There should still be something at the end of the THEN or ELSE
blocks taking us to our final destination. */
else if (GET_CODE (last) == JUMP_INSN)
else if (JUMP_P (last))
;
else if (combo_bb->succ->dest == EXIT_BLOCK_PTR
&& GET_CODE (last) == CALL_INSN
&& CALL_P (last)
&& SIBLING_CALL_P (last))
;
else if ((combo_bb->succ->flags & EDGE_EH)
@ -2417,11 +2417,11 @@ block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
while (insn != NULL_RTX)
{
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
return -1;
if (INSN_P (insn)
&& GET_CODE (insn) != JUMP_INSN
&& !JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER)
n_insns++;
@ -2560,12 +2560,12 @@ find_if_block (struct ce_if_block * ce_info)
rtx last_insn = BB_END (then_bb);
while (last_insn
&& GET_CODE (last_insn) == NOTE
&& NOTE_P (last_insn)
&& last_insn != BB_HEAD (then_bb))
last_insn = PREV_INSN (last_insn);
if (last_insn
&& GET_CODE (last_insn) == JUMP_INSN
&& JUMP_P (last_insn)
&& ! simplejump_p (last_insn))
return FALSE;
@ -3050,9 +3050,9 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
head = BB_HEAD (merge_bb);
end = BB_END (merge_bb);
if (GET_CODE (head) == CODE_LABEL)
if (LABEL_P (head))
head = NEXT_INSN (head);
if (GET_CODE (head) == NOTE)
if (NOTE_P (head))
{
if (head == end)
{
@ -3062,7 +3062,7 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
head = NEXT_INSN (head);
}
if (GET_CODE (end) == JUMP_INSN)
if (JUMP_P (end))
{
if (head == end)
{
@ -3126,7 +3126,7 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
/* Check for no calls or trapping operations. */
for (insn = head; ; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
return FALSE;
if (INSN_P (insn))
{


@ -636,7 +636,7 @@ try_constants (rtx insn, struct inline_remap *map)
/* Enforce consistency between the addresses in the regular insn flow
and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
{
subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
apply_change_group ();


@ -90,7 +90,7 @@ rebuild_jump_labels (rtx f)
count doesn't drop to zero. */
for (insn = forced_labels; insn; insn = XEXP (insn, 1))
if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
if (LABEL_P (XEXP (insn, 0)))
LABEL_NUSES (XEXP (insn, 0))++;
timevar_pop (TV_REBUILD_JUMP);
}
@ -110,10 +110,10 @@ cleanup_barriers (void)
for (insn = get_insns (); insn; insn = next)
{
next = NEXT_INSN (insn);
if (GET_CODE (insn) == BARRIER)
if (BARRIER_P (insn))
{
prev = prev_nonnote_insn (insn);
if (GET_CODE (prev) == BARRIER)
if (BARRIER_P (prev))
delete_barrier (insn);
else if (prev != PREV_INSN (insn))
reorder_insns (insn, insn, prev);
@ -132,7 +132,7 @@ purge_line_number_notes (rtx f)
even if it became empty. */
for (insn = f; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
{
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
/* Any previous line note was for the prologue; gdb wants a new
@ -168,11 +168,11 @@ init_label_info (rtx f)
rtx insn;
for (insn = f; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
else if (GET_CODE (insn) == JUMP_INSN)
else if (JUMP_P (insn))
JUMP_LABEL (insn) = 0;
else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx note, next;
@ -198,7 +198,7 @@ mark_all_labels (rtx f)
if (INSN_P (insn))
{
mark_jump_label (PATTERN (insn), insn, 0);
if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
if (! INSN_DELETED_P (insn) && JUMP_P (insn))
{
/* When we know the LABEL_REF contained in a REG used in
an indirect jump, we'll have a REG_LABEL note so that
@ -242,7 +242,7 @@ squeeze_notes (rtx* startp, rtx* endp)
for (insn = start; insn != past_end; insn = next)
{
next = NEXT_INSN (insn);
if (GET_CODE (insn) == NOTE
if (NOTE_P (insn)
&& (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
@ -289,7 +289,7 @@ get_label_before (rtx insn)
or make a new one if there is none. */
label = prev_nonnote_insn (insn);
if (label == 0 || GET_CODE (label) != CODE_LABEL)
if (label == 0 || !LABEL_P (label))
{
rtx prev = PREV_INSN (insn);
@ -311,7 +311,7 @@ get_label_after (rtx insn)
or make a new one if there is none. */
label = next_nonnote_insn (insn);
if (label == 0 || GET_CODE (label) != CODE_LABEL)
if (label == 0 || !LABEL_P (label))
{
label = gen_label_rtx ();
emit_label_after (label, insn);
@ -393,7 +393,7 @@ reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn
return UNKNOWN;
for (prev = prev_nonnote_insn (insn);
prev != 0 && GET_CODE (prev) != CODE_LABEL;
prev != 0 && !LABEL_P (prev);
prev = prev_nonnote_insn (prev))
{
rtx set = set_of (arg0, prev);
@ -736,7 +736,7 @@ comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
int
simplejump_p (rtx insn)
{
return (GET_CODE (insn) == JUMP_INSN
return (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
&& GET_CODE (SET_DEST (PATTERN (insn))) == PC
&& GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
@ -813,7 +813,7 @@ rtx
pc_set (rtx insn)
{
rtx pat;
if (GET_CODE (insn) != JUMP_INSN)
if (!JUMP_P (insn))
return NULL_RTX;
pat = PATTERN (insn);
@ -903,7 +903,7 @@ returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
int
returnjump_p (rtx insn)
{
if (GET_CODE (insn) != JUMP_INSN)
if (!JUMP_P (insn))
return 0;
return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}
@ -916,7 +916,7 @@ onlyjump_p (rtx insn)
{
rtx set;
if (GET_CODE (insn) != JUMP_INSN)
if (!JUMP_P (insn))
return 0;
set = single_set (insn);
@ -1003,12 +1003,12 @@ follow_jumps (rtx label)
for (depth = 0;
(depth < 10
&& (insn = next_active_insn (value)) != 0
&& GET_CODE (insn) == JUMP_INSN
&& JUMP_P (insn)
&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
&& onlyjump_p (insn))
|| GET_CODE (PATTERN (insn)) == RETURN)
&& (next = NEXT_INSN (insn))
&& GET_CODE (next) == BARRIER);
&& BARRIER_P (next));
depth++)
{
/* Don't chain through the insn that jumps into a loop
@ -1018,7 +1018,7 @@ follow_jumps (rtx label)
rtx tem;
if (!reload_completed)
for (tem = value; tem != insn; tem = NEXT_INSN (tem))
if (GET_CODE (tem) == NOTE
if (NOTE_P (tem)
&& (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
/* ??? Optional. Disables some optimizations, but makes
gcov output more accurate with -O. */
@ -1092,11 +1092,11 @@ mark_jump_label (rtx x, rtx insn, int in_mem)
/* Ignore remaining references to unreachable labels that
have been deleted. */
if (GET_CODE (label) == NOTE
if (NOTE_P (label)
&& NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
break;
if (GET_CODE (label) != CODE_LABEL)
if (!LABEL_P (label))
abort ();
/* Ignore references to labels of containing functions. */
@ -1109,7 +1109,7 @@ mark_jump_label (rtx x, rtx insn, int in_mem)
if (insn)
{
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
JUMP_LABEL (insn) = label;
else
{
@ -1174,7 +1174,7 @@ delete_jump (rtx insn)
void
delete_barrier (rtx insn)
{
if (GET_CODE (insn) != BARRIER)
if (!BARRIER_P (insn))
abort ();
delete_insn (insn);
@ -1191,15 +1191,15 @@ delete_prior_computation (rtx note, rtx insn)
rtx reg = XEXP (note, 0);
for (our_prev = prev_nonnote_insn (insn);
our_prev && (GET_CODE (our_prev) == INSN
|| GET_CODE (our_prev) == CALL_INSN);
our_prev && (NONJUMP_INSN_P (our_prev)
|| CALL_P (our_prev));
our_prev = prev_nonnote_insn (our_prev))
{
rtx pat = PATTERN (our_prev);
/* If we reach a CALL which is not calling a const function
or the callee pops the arguments, then give up. */
if (GET_CODE (our_prev) == CALL_INSN
if (CALL_P (our_prev)
&& (! CONST_OR_PURE_CALL_P (our_prev)
|| GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
break;
@ -1212,14 +1212,14 @@ delete_prior_computation (rtx note, rtx insn)
break;
if (GET_CODE (pat) == USE
&& GET_CODE (XEXP (pat, 0)) == INSN)
&& NONJUMP_INSN_P (XEXP (pat, 0)))
/* reorg creates USEs that look like this. We leave them
alone because reorg needs them for its own purposes. */
break;
if (reg_set_p (reg, pat))
{
if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
if (side_effects_p (pat) && !CALL_P (our_prev))
break;
if (GET_CODE (pat) == PARALLEL)
@ -1325,7 +1325,7 @@ delete_computation (rtx insn)
will use them. So if the previous insn
exists to set the CC's, delete it
(unless it performs auto-increments, etc.). */
if (prev && GET_CODE (prev) == INSN
if (prev && NONJUMP_INSN_P (prev)
&& sets_cc0_p (PATTERN (prev)))
{
if (sets_cc0_p (PATTERN (prev)) > 0
@ -1365,7 +1365,7 @@ delete_computation (rtx insn)
rtx
delete_related_insns (rtx insn)
{
int was_code_label = (GET_CODE (insn) == CODE_LABEL);
int was_code_label = (LABEL_P (insn));
rtx note;
rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
@ -1381,13 +1381,13 @@ delete_related_insns (rtx insn)
/* If instruction is followed by a barrier,
delete the barrier too. */
if (next != 0 && GET_CODE (next) == BARRIER)
if (next != 0 && BARRIER_P (next))
delete_insn (next);
/* If deleting a jump, decrement the count of the label,
and delete the label if it is now unused. */
if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
if (JUMP_P (insn) && JUMP_LABEL (insn))
{
rtx lab = JUMP_LABEL (insn), lab_next;
@ -1418,7 +1418,7 @@ delete_related_insns (rtx insn)
/* Likewise if we're deleting a dispatch table. */
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
{
@ -1435,15 +1435,15 @@ delete_related_insns (rtx insn)
}
/* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
if (NONJUMP_INSN_P (insn) || CALL_P (insn))
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_LABEL
/* This could also be a NOTE_INSN_DELETED_LABEL note. */
&& GET_CODE (XEXP (note, 0)) == CODE_LABEL)
&& LABEL_P (XEXP (note, 0)))
if (LABEL_NUSES (XEXP (note, 0)) == 0)
delete_related_insns (XEXP (note, 0));
while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
prev = PREV_INSN (prev);
/* If INSN was a label and a dispatch table follows it,
@ -1452,14 +1452,14 @@ delete_related_insns (rtx insn)
if (was_code_label
&& NEXT_INSN (insn) != 0
&& GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
&& JUMP_P (NEXT_INSN (insn))
&& (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
|| GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
next = delete_related_insns (NEXT_INSN (insn));
/* If INSN was a label, delete insns following it if now unreachable. */
if (was_code_label && prev && GET_CODE (prev) == BARRIER)
if (was_code_label && prev && BARRIER_P (prev))
{
enum rtx_code code;
while (next)
@ -1500,7 +1500,7 @@ delete_for_peephole (rtx from, rtx to)
rtx next = NEXT_INSN (insn);
rtx prev = PREV_INSN (insn);
if (GET_CODE (insn) != NOTE)
if (!NOTE_P (insn))
{
INSN_DELETED_P (insn) = 1;
@ -1672,7 +1672,7 @@ redirect_jump (rtx jump, rtx nlabel, int delete_unused)
function, move the function end note so that -Wreturn-type works. */
if (olabel && nlabel
&& NEXT_INSN (olabel)
&& GET_CODE (NEXT_INSN (olabel)) == NOTE
&& NOTE_P (NEXT_INSN (olabel))
&& NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);


@ -1201,7 +1201,7 @@ optimize_mode_switching (FILE *file)
if (eg->flags & EDGE_ABNORMAL)
{
emited = true;
if (GET_CODE (BB_END (src_bb)) == JUMP_INSN)
if (JUMP_P (BB_END (src_bb)))
emit_insn_before (mode_set, BB_END (src_bb));
/* It doesn't make sense to switch to normal mode
after a CALL_INSN, so we're going to abort if we
@ -1214,7 +1214,7 @@ optimize_mode_switching (FILE *file)
the call (it wouldn't make sense, anyway). In
the case of EH edges, EH entry points also start
in normal mode, so a similar reasoning applies. */
else if (GET_CODE (BB_END (src_bb)) == INSN)
else if (NONJUMP_INSN_P (BB_END (src_bb)))
emit_insn_after (mode_set, BB_END (src_bb));
else
abort ();
@ -1266,7 +1266,7 @@ optimize_mode_switching (FILE *file)
continue;
emited = true;
if (GET_CODE (ptr->insn_ptr) == NOTE
if (NOTE_P (ptr->insn_ptr)
&& (NOTE_LINE_NUMBER (ptr->insn_ptr)
== NOTE_INSN_BASIC_BLOCK))
emit_insn_after (mode_set, ptr->insn_ptr);


@ -484,7 +484,7 @@ validate_equiv_mem (rtx start, rtx reg, rtx memref)
if (find_reg_note (insn, REG_DEAD, reg))
return 1;
if (GET_CODE (insn) == CALL_INSN && ! RTX_UNCHANGING_P (memref)
if (CALL_P (insn) && ! RTX_UNCHANGING_P (memref)
&& ! CONST_OR_PURE_CALL_P (insn))
return 0;
@ -985,7 +985,7 @@ update_equiv_regs (void)
if (REG_N_REFS (regno) == 2
&& (rtx_equal_p (XEXP (note, 0), src)
|| ! equiv_init_varies_p (src))
&& GET_CODE (insn) == INSN
&& NONJUMP_INSN_P (insn)
&& equiv_init_movable_p (PATTERN (insn), regno))
reg_equiv[regno].replace = 1;
}
@ -1190,7 +1190,7 @@ block_alloc (int b)
insn = BB_END (BASIC_BLOCK (b));
while (1)
{
if (GET_CODE (insn) != NOTE)
if (!NOTE_P (insn))
if (++insn_count > max_uid)
abort ();
if (insn == BB_HEAD (BASIC_BLOCK (b)))
@ -1213,7 +1213,7 @@ block_alloc (int b)
insn = BB_HEAD (BASIC_BLOCK (b));
while (1)
{
if (GET_CODE (insn) != NOTE)
if (!NOTE_P (insn))
insn_number++;
if (INSN_P (insn))
@ -1353,7 +1353,7 @@ block_alloc (int b)
REG_P (r0))
&& (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
&& XEXP (link, 0) != 0
&& GET_CODE (XEXP (link, 0)) == INSN
&& NONJUMP_INSN_P (XEXP (link, 0))
&& (set = single_set (XEXP (link, 0))) != 0
&& SET_DEST (set) == r0 && SET_SRC (set) == r0
&& (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
@ -1428,7 +1428,7 @@ block_alloc (int b)
CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
block, so clear any register number that combined within it. */
if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
&& GET_CODE (XEXP (note, 0)) == INSN
&& NONJUMP_INSN_P (XEXP (note, 0))
&& GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
no_conflict_combined_regno = -1;
}


@ -189,7 +189,7 @@ doloop_valid_p (struct loop *loop, struct niter_desc *desc)
{
/* A called function may clobber any special registers required for
low-overhead looping. */
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
if (dump_file)
fprintf (dump_file, "Doloop: Function call in loop.\n");
@ -199,7 +199,7 @@ doloop_valid_p (struct loop *loop, struct niter_desc *desc)
/* Some targets (eg, PPC) use the count register for branch on table
instructions. ??? This should be a target specific check. */
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_VEC))
{
@ -514,7 +514,7 @@ doloop_optimize (struct loop *loop)
{
while (NEXT_INSN (doloop_pat) != NULL_RTX)
doloop_pat = NEXT_INSN (doloop_pat);
if (GET_CODE (doloop_pat) == JUMP_INSN)
if (JUMP_P (doloop_pat))
doloop_pat = PATTERN (doloop_pat);
else
doloop_pat = NULL_RTX;


@ -230,7 +230,7 @@ find_exits (struct loop *loop, basic_block *body,
{
FOR_BB_INSNS (body[i], insn)
{
if (GET_CODE (insn) == CALL_INSN
if (CALL_P (insn)
&& !CONST_OR_PURE_CALL_P (insn))
{
has_call = true;
@ -525,7 +525,7 @@ find_invariants_bb (basic_block bb, bool always_reached, bool always_executed,
find_invariants_insn (insn, always_reached, always_executed, df);
if (always_reached
&& GET_CODE (insn) == CALL_INSN
&& CALL_P (insn)
&& !CONST_OR_PURE_CALL_P (insn))
always_reached = false;
}


@ -1371,7 +1371,7 @@ simplify_using_assignment (rtx insn, rtx *expr, regset altered)
ret = true;
note_stores (PATTERN (insn), mark_altered, altered);
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
int i;


@ -419,7 +419,7 @@ compute_luids (rtx start, rtx end, int prev_luid)
continue;
/* Don't assign luids to line-number NOTEs, so that the distance in
luids between two insns is not affected by -g. */
if (GET_CODE (insn) != NOTE
if (!NOTE_P (insn)
|| NOTE_LINE_NUMBER (insn) <= 0)
uid_luid[INSN_UID (insn)] = ++i;
else
@ -457,7 +457,7 @@ loop_optimize (rtx f, FILE *dumpfile, int flags)
max_loop_num = 0;
for (insn = f; insn; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) == NOTE
if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
max_loop_num++;
}
@ -671,8 +671,8 @@ scan_loop (struct loop *loop, int flags)
for (p = NEXT_INSN (loop_start);
p != loop_end
&& GET_CODE (p) != CODE_LABEL && ! INSN_P (p)
&& (GET_CODE (p) != NOTE
&& !LABEL_P (p) && ! INSN_P (p)
&& (!NOTE_P (p)
|| (NOTE_LINE_NUMBER (p) != NOTE_INSN_LOOP_BEG
&& NOTE_LINE_NUMBER (p) != NOTE_INSN_LOOP_END));
p = NEXT_INSN (p))
@ -698,7 +698,7 @@ scan_loop (struct loop *loop, int flags)
Start scan from there.
But record in LOOP->TOP the place where the end-test jumps
back to so we can scan that after the end of the loop. */
if (GET_CODE (p) == JUMP_INSN
if (JUMP_P (p)
/* Loop entry must be unconditional jump (and not a RETURN) */
&& any_uncondjump_p (p)
&& JUMP_LABEL (p) != 0
@ -723,7 +723,7 @@ scan_loop (struct loop *loop, int flags)
test above. */
if (INSN_UID (loop->scan_start) >= max_uid_for_loop
|| GET_CODE (loop->scan_start) != CODE_LABEL)
|| !LABEL_P (loop->scan_start))
{
if (loop_dump_stream)
fprintf (loop_dump_stream, "\nLoop from %d to %d is phony.\n\n",
@ -765,7 +765,7 @@ scan_loop (struct loop *loop, int flags)
{
if (in_libcall && INSN_P (p) && find_reg_note (p, REG_RETVAL, NULL_RTX))
in_libcall--;
if (GET_CODE (p) == INSN)
if (NONJUMP_INSN_P (p))
{
temp = find_reg_note (p, REG_LIBCALL, NULL_RTX);
if (temp)
@ -1032,7 +1032,7 @@ scan_loop (struct loop *loop, int flags)
Also, if the value loaded into the register
depends on the same register, this cannot be done. */
else if (SET_SRC (set) == const0_rtx
&& GET_CODE (NEXT_INSN (p)) == INSN
&& NONJUMP_INSN_P (NEXT_INSN (p))
&& (set1 = single_set (NEXT_INSN (p)))
&& GET_CODE (set1) == SET
&& (GET_CODE (SET_DEST (set1)) == STRICT_LOW_PART)
@ -1102,7 +1102,7 @@ scan_loop (struct loop *loop, int flags)
/* Past a call insn, we get to insns which might not be executed
because the call might exit. This matters for insns that trap.
Constant and pure call insns always return, so they don't count. */
else if (GET_CODE (p) == CALL_INSN && ! CONST_OR_PURE_CALL_P (p))
else if (CALL_P (p) && ! CONST_OR_PURE_CALL_P (p))
call_passed = 1;
/* Past a label or a jump, we get to insns for which we
can't count on whether or how many times they will be
@ -1110,17 +1110,17 @@ scan_loop (struct loop *loop, int flags)
only move out sets of trivial variables
(those not used after the loop). */
/* Similar code appears twice in strength_reduce. */
else if ((GET_CODE (p) == CODE_LABEL || GET_CODE (p) == JUMP_INSN)
else if ((LABEL_P (p) || JUMP_P (p))
/* If we enter the loop in the middle, and scan around to the
beginning, don't set maybe_never for that. This must be an
unconditional jump, otherwise the code at the top of the
loop might never be executed. Unconditional jumps are
followed by a barrier then the loop_end. */
&& ! (GET_CODE (p) == JUMP_INSN && JUMP_LABEL (p) == loop->top
&& ! (JUMP_P (p) && JUMP_LABEL (p) == loop->top
&& NEXT_INSN (NEXT_INSN (p)) == loop_end
&& any_uncondjump_p (p)))
maybe_never = 1;
else if (GET_CODE (p) == NOTE)
else if (NOTE_P (p))
{
/* At the virtual top of a converted loop, insns are again known to
be executed: logically, the loop begins here even though the exit
@ -1171,7 +1171,7 @@ scan_loop (struct loop *loop, int flags)
loop_regs_scan (loop, 0);
for (update_start = loop_start;
PREV_INSN (update_start)
&& GET_CODE (PREV_INSN (update_start)) != CODE_LABEL;
&& !LABEL_P (PREV_INSN (update_start));
update_start = PREV_INSN (update_start))
;
update_end = NEXT_INSN (loop_end);
@ -1198,7 +1198,7 @@ scan_loop (struct loop *loop, int flags)
for (update_start = loop_start;
PREV_INSN (update_start)
&& GET_CODE (PREV_INSN (update_start)) != CODE_LABEL;
&& !LABEL_P (PREV_INSN (update_start));
update_start = PREV_INSN (update_start))
;
update_end = NEXT_INSN (loop_end);
@ -1208,7 +1208,7 @@ scan_loop (struct loop *loop, int flags)
if (flag_strength_reduce)
{
if (update_end && GET_CODE (update_end) == CODE_LABEL)
if (update_end && LABEL_P (update_end))
/* Ensure our label doesn't go away. */
LABEL_NUSES (update_end)++;
@ -1217,7 +1217,7 @@ scan_loop (struct loop *loop, int flags)
reg_scan_update (update_start, update_end, loop_max_reg);
loop_max_reg = max_reg_num ();
if (update_end && GET_CODE (update_end) == CODE_LABEL
if (update_end && LABEL_P (update_end)
&& --LABEL_NUSES (update_end) == 0)
delete_related_insns (update_end);
}
@ -1300,8 +1300,7 @@ libcall_other_reg (rtx insn, rtx equiv)
while (p != insn)
{
if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN
|| GET_CODE (p) == CALL_INSN)
if (INSN_P (p))
record_excess_regs (PATTERN (p), equiv, &output);
p = NEXT_INSN (p);
}
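This hunk, and several later ones in this file, collapse the three-way code test into INSN_P; under the rtl.h definitions sketched earlier, the two forms expand to exactly the same condition:

/* Equivalent forms: INSN_P is the union of the three insn codes.  */
if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN
    || GET_CODE (p) == CALL_INSN)          /* before */
  record_excess_regs (PATTERN (p), equiv, &output);

if (INSN_P (p))                            /* after */
  record_excess_regs (PATTERN (p), equiv, &output);

One later hunk, in check_insn_for_givs, shows an INSN / JUMP_INSN / CODE_LABEL test becoming plain INSN_P; if shown faithfully, that one is not a pure renaming, since INSN_P covers CALL_INSN but not CODE_LABEL.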
@ -1374,10 +1373,10 @@ libcall_benefit (rtx last)
for (insn = XEXP (find_reg_note (last, REG_RETVAL, NULL_RTX), 0);
insn != last; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
benefit += 10; /* Assume at least this many insns in a library
routine. */
else if (GET_CODE (insn) == INSN
else if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER)
benefit++;
@ -1398,13 +1397,13 @@ skip_consec_insns (rtx insn, int count)
/* If first insn of libcall sequence, skip to end. */
/* Do this at start of loop, since INSN is guaranteed to
be an insn here. */
if (GET_CODE (insn) != NOTE
if (!NOTE_P (insn)
&& (temp = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
insn = XEXP (temp, 0);
do
insn = NEXT_INSN (insn);
while (GET_CODE (insn) == NOTE);
while (NOTE_P (insn));
}
return insn;
@ -1962,14 +1961,14 @@ move_movables (struct loop *loop, struct loop_movables *movables,
{
/* If this is the first insn of a library call sequence,
something is very wrong. */
if (GET_CODE (p) != NOTE
if (!NOTE_P (p)
&& (temp = find_reg_note (p, REG_LIBCALL, NULL_RTX)))
abort ();
/* If this is the last insn of a libcall sequence, then
delete every insn in the sequence except the last.
The last insn is handled in the normal manner. */
if (GET_CODE (p) != NOTE
if (!NOTE_P (p)
&& (temp = find_reg_note (p, REG_RETVAL, NULL_RTX)))
{
temp = XEXP (temp, 0);
@ -1986,7 +1985,7 @@ move_movables (struct loop *loop, struct loop_movables *movables,
pointers, but when we skip over a NOTE we must fix
it up. Otherwise that code walks into the non-deleted
insn stream. */
while (p && GET_CODE (p) == NOTE)
while (p && NOTE_P (p))
p = NEXT_INSN (temp) = NEXT_INSN (p);
if (m->insert_temp)
@ -2030,14 +2029,14 @@ move_movables (struct loop *loop, struct loop_movables *movables,
/* If first insn of libcall sequence, skip to end. */
/* Do this at start of loop, since p is guaranteed to
be an insn here. */
if (GET_CODE (p) != NOTE
if (!NOTE_P (p)
&& (temp = find_reg_note (p, REG_LIBCALL, NULL_RTX)))
p = XEXP (temp, 0);
/* If last insn of libcall sequence, move all
insns except the last before the loop. The last
insn is handled in the normal manner. */
if (GET_CODE (p) != NOTE
if (!NOTE_P (p)
&& (temp = find_reg_note (p, REG_RETVAL, NULL_RTX)))
{
rtx fn_address = 0;
@ -2052,7 +2051,7 @@ move_movables (struct loop *loop, struct loop_movables *movables,
rtx n;
rtx next;
if (GET_CODE (temp) == NOTE)
if (NOTE_P (temp))
continue;
body = PATTERN (temp);
@ -2061,9 +2060,9 @@ move_movables (struct loop *loop, struct loop_movables *movables,
not counting USE or NOTE insns. */
for (next = NEXT_INSN (temp); next != p;
next = NEXT_INSN (next))
if (! (GET_CODE (next) == INSN
if (! (NONJUMP_INSN_P (next)
&& GET_CODE (PATTERN (next)) == USE)
&& GET_CODE (next) != NOTE)
&& !NOTE_P (next))
break;
/* If that is the call, this may be the insn
@ -2077,7 +2076,7 @@ move_movables (struct loop *loop, struct loop_movables *movables,
function address into the register that the
call insn will use. flow.c will delete any
redundant stores that we have created. */
if (GET_CODE (next) == CALL_INSN
if (CALL_P (next)
&& GET_CODE (body) == SET
&& REG_P (SET_DEST (body))
&& (n = find_reg_note (temp, REG_EQUAL,
@ -2092,14 +2091,14 @@ move_movables (struct loop *loop, struct loop_movables *movables,
/* We have the call insn.
If it uses the register we suspect it might,
load it with the correct address directly. */
if (GET_CODE (temp) == CALL_INSN
if (CALL_P (temp)
&& fn_address != 0
&& reg_referenced_p (fn_reg, body))
loop_insn_emit_after (loop, 0, fn_address_insn,
gen_move_insn
(fn_reg, fn_address));
if (GET_CODE (temp) == CALL_INSN)
if (CALL_P (temp))
{
i1 = loop_call_insn_hoist (loop, body);
/* Because the USAGE information potentially
@ -2146,7 +2145,7 @@ move_movables (struct loop *loop, struct loop_movables *movables,
end_sequence ();
i1 = loop_insn_hoist (loop, sequence);
}
else if (GET_CODE (p) == CALL_INSN)
else if (CALL_P (p))
{
i1 = loop_call_insn_hoist (loop, PATTERN (p));
/* Because the USAGE information potentially
@ -2230,7 +2229,7 @@ move_movables (struct loop *loop, struct loop_movables *movables,
pointers, but when we skip over a NOTE we must fix
it up. Otherwise that code walks into the non-deleted
insn stream. */
while (p && GET_CODE (p) == NOTE)
while (p && NOTE_P (p))
p = NEXT_INSN (temp) = NEXT_INSN (p);
if (m->insert_temp)
@ -2351,8 +2350,7 @@ move_movables (struct loop *loop, struct loop_movables *movables,
/* Go through all the instructions in the loop, making
all the register substitutions scheduled in REG_MAP. */
for (p = new_start; p != loop_end; p = NEXT_INSN (p))
if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN
|| GET_CODE (p) == CALL_INSN)
if (INSN_P (p))
{
replace_regs (PATTERN (p), reg_map, nregs, 0);
replace_regs (REG_NOTES (p), reg_map, nregs, 0);
@ -2542,10 +2540,10 @@ prescan_loop (struct loop *loop)
/* If loop opts run twice, this was set on 1st pass for 2nd. */
loop_info->preconditioned = NOTE_PRECONDITIONED (end);
for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
for (insn = start; insn && !LABEL_P (insn);
insn = PREV_INSN (insn))
{
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
loop_info->pre_header_has_call = 1;
break;
@ -2662,7 +2660,7 @@ prescan_loop (struct loop *loop)
if (volatile_refs_p (PATTERN (insn)))
loop_info->has_volatile = 1;
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_VEC))
loop_info->has_tablejump = 1;
@ -2756,7 +2754,7 @@ find_and_verify_loops (rtx f, struct loops *loops)
current_loop = NULL;
for (insn = f; insn; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
switch (NOTE_LINE_NUMBER (insn))
{
case NOTE_INSN_LOOP_BEG:
@ -2788,7 +2786,7 @@ find_and_verify_loops (rtx f, struct loops *loops)
break;
}
if (GET_CODE (insn) == CALL_INSN
if (CALL_P (insn)
&& find_reg_note (insn, REG_SETJMP, NULL))
{
/* In this case, we must invalidate our current loop and any
@ -2835,14 +2833,14 @@ find_and_verify_loops (rtx f, struct loops *loops)
{
struct loop *this_loop = uid_loop[INSN_UID (insn)];
if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
if (NONJUMP_INSN_P (insn) || CALL_P (insn))
{
rtx note = find_reg_note (insn, REG_LABEL, NULL_RTX);
if (note)
invalidate_loops_containing_label (XEXP (note, 0));
}
if (GET_CODE (insn) != JUMP_INSN)
if (!JUMP_P (insn))
continue;
mark_loop_jump (PATTERN (insn), this_loop);
@ -2865,10 +2863,10 @@ find_and_verify_loops (rtx f, struct loops *loops)
/* Go backwards until we reach the start of the loop, a label,
or a JUMP_INSN. */
for (p = PREV_INSN (insn);
GET_CODE (p) != CODE_LABEL
&& ! (GET_CODE (p) == NOTE
!LABEL_P (p)
&& ! (NOTE_P (p)
&& NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_BEG)
&& GET_CODE (p) != JUMP_INSN;
&& !JUMP_P (p);
p = PREV_INSN (p))
;
@ -2889,7 +2887,7 @@ find_and_verify_loops (rtx f, struct loops *loops)
/* Make sure that the target of P is within the current loop. */
if (GET_CODE (p) == JUMP_INSN && JUMP_LABEL (p)
if (JUMP_P (p) && JUMP_LABEL (p)
&& uid_loop[INSN_UID (JUMP_LABEL (p))] != this_loop)
outer_loop = this_loop;
@ -2903,7 +2901,7 @@ find_and_verify_loops (rtx f, struct loops *loops)
and move the block of code to the spot we found. */
if (! outer_loop
&& GET_CODE (p) == JUMP_INSN
&& JUMP_P (p)
&& JUMP_LABEL (p) != 0
/* Just ignore jumps to labels that were never emitted.
These always indicate compilation errors. */
@ -2924,17 +2922,17 @@ find_and_verify_loops (rtx f, struct loops *loops)
/* Search for possible garbage past the conditional jumps
and look for the last barrier. */
for (tmp = last_insn_to_move;
tmp && GET_CODE (tmp) != CODE_LABEL; tmp = NEXT_INSN (tmp))
if (GET_CODE (tmp) == BARRIER)
tmp && !LABEL_P (tmp); tmp = NEXT_INSN (tmp))
if (BARRIER_P (tmp))
last_insn_to_move = tmp;
for (loc = target; loc; loc = PREV_INSN (loc))
if (GET_CODE (loc) == BARRIER
if (BARRIER_P (loc)
/* Don't move things inside a tablejump. */
&& ((loc2 = next_nonnote_insn (loc)) == 0
|| GET_CODE (loc2) != CODE_LABEL
|| !LABEL_P (loc2)
|| (loc2 = next_nonnote_insn (loc2)) == 0
|| GET_CODE (loc2) != JUMP_INSN
|| !JUMP_P (loc2)
|| (GET_CODE (PATTERN (loc2)) != ADDR_VEC
&& GET_CODE (PATTERN (loc2)) != ADDR_DIFF_VEC))
&& uid_loop[INSN_UID (loc)] == target_loop)
@ -2942,12 +2940,12 @@ find_and_verify_loops (rtx f, struct loops *loops)
if (loc == 0)
for (loc = target; loc; loc = NEXT_INSN (loc))
if (GET_CODE (loc) == BARRIER
if (BARRIER_P (loc)
/* Don't move things inside a tablejump. */
&& ((loc2 = next_nonnote_insn (loc)) == 0
|| GET_CODE (loc2) != CODE_LABEL
|| !LABEL_P (loc2)
|| (loc2 = next_nonnote_insn (loc2)) == 0
|| GET_CODE (loc2) != JUMP_INSN
|| !JUMP_P (loc2)
|| (GET_CODE (PATTERN (loc2)) != ADDR_VEC
&& GET_CODE (PATTERN (loc2)) != ADDR_DIFF_VEC))
&& uid_loop[INSN_UID (loc)] == target_loop)
@ -3217,7 +3215,7 @@ labels_in_range_p (rtx insn, int end)
{
while (insn && INSN_LUID (insn) <= end)
{
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
return 1;
insn = NEXT_INSN (insn);
}
@ -4327,7 +4325,7 @@ for_each_insn_in_loop (struct loop *loop, loop_insn_callback fncall)
returns, exits the loop, is a jump to a location that is still
behind the label, or is a jump to the loop start. */
if (GET_CODE (p) == CODE_LABEL)
if (LABEL_P (p))
{
rtx insn = p;
@ -4348,7 +4346,7 @@ for_each_insn_in_loop (struct loop *loop, loop_insn_callback fncall)
break;
}
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) != RETURN
&& (!any_condjump_p (insn)
|| (JUMP_LABEL (insn) != 0
@ -4365,7 +4363,7 @@ for_each_insn_in_loop (struct loop *loop, loop_insn_callback fncall)
on whether they will be executed during each iteration. */
/* This code appears twice in strength_reduce. There is also similar
code in scan_loop. */
if (GET_CODE (p) == JUMP_INSN
if (JUMP_P (p)
/* If we enter the loop in the middle, and scan around to the
beginning, don't set not_every_iteration for that.
This can be any kind of jump, since we want to know if insns
@ -4389,7 +4387,7 @@ for_each_insn_in_loop (struct loop *loop, loop_insn_callback fncall)
not_every_iteration = 1;
}
else if (GET_CODE (p) == NOTE)
else if (NOTE_P (p))
{
/* At the virtual top of a converted loop, insns are again known to
be executed each iteration: logically, the loop begins here
@ -4415,7 +4413,7 @@ for_each_insn_in_loop (struct loop *loop, loop_insn_callback fncall)
Note that LOOP_TOP is only set for rotated loops and we need
this check for all loops, so compare against the CODE_LABEL
which immediately follows LOOP_START. */
if (GET_CODE (p) == JUMP_INSN
if (JUMP_P (p)
&& JUMP_LABEL (p) == NEXT_INSN (loop->start))
past_loop_latch = 1;
@ -4431,7 +4429,7 @@ for_each_insn_in_loop (struct loop *loop, loop_insn_callback fncall)
if (not_every_iteration
&& !past_loop_latch
&& GET_CODE (p) == CODE_LABEL
&& LABEL_P (p)
&& no_labels_between_p (p, loop->end)
&& loop_insn_first_p (p, loop->cont))
not_every_iteration = 0;
@ -4499,13 +4497,13 @@ loop_bivs_init_find (struct loop *loop)
halting at first label. Also record any test condition. */
call_seen = 0;
for (p = loop->start; p && GET_CODE (p) != CODE_LABEL; p = PREV_INSN (p))
for (p = loop->start; p && !LABEL_P (p); p = PREV_INSN (p))
{
rtx test;
note_insn = p;
if (GET_CODE (p) == CALL_INSN)
if (CALL_P (p))
call_seen = 1;
if (INSN_P (p))
@ -4514,7 +4512,7 @@ loop_bivs_init_find (struct loop *loop)
/* Record any test of a biv that branches around the loop if no store
between it and the start of loop. We only care about tests with
constants and registers and only certain of those. */
if (GET_CODE (p) == JUMP_INSN
if (JUMP_P (p)
&& JUMP_LABEL (p) != 0
&& next_real_insn (JUMP_LABEL (p)) == next_real_insn (loop->end)
&& (test = get_condition_for_loop (loop, p)) != 0
@ -5319,8 +5317,7 @@ strength_reduce (struct loop *loop, int flags)
register substitutions scheduled in REG_MAP. */
for (p = loop->start; p != loop->end; p = NEXT_INSN (p))
if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN
|| GET_CODE (p) == CALL_INSN)
if (INSN_P (p))
{
replace_regs (PATTERN (p), reg_map, reg_map_size, 0);
replace_regs (REG_NOTES (p), reg_map, reg_map_size, 0);
@ -5384,7 +5381,7 @@ check_insn_for_bivs (struct loop *loop, rtx p, int not_every_iteration,
rtx mult_val;
rtx *location;
if (GET_CODE (p) == INSN
if (NONJUMP_INSN_P (p)
&& (set = single_set (p))
&& REG_P (SET_DEST (set)))
{
@ -5425,7 +5422,7 @@ check_insn_for_givs (struct loop *loop, rtx p, int not_every_iteration,
rtx set;
/* Look for a general induction variable in a register. */
if (GET_CODE (p) == INSN
if (NONJUMP_INSN_P (p)
&& (set = single_set (p))
&& REG_P (SET_DEST (set))
&& ! regs->array[REGNO (SET_DEST (set))].may_not_optimize)
@ -5482,14 +5479,13 @@ check_insn_for_givs (struct loop *loop, rtx p, int not_every_iteration,
}
/* Look for givs which are memory addresses. */
if (GET_CODE (p) == INSN)
if (NONJUMP_INSN_P (p))
find_mem_givs (loop, PATTERN (p), p, not_every_iteration,
maybe_multiple);
/* Update the status of whether giv can derive other givs. This can
change when we pass a label or an insn that updates a biv. */
if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN
|| GET_CODE (p) == CODE_LABEL)
if (INSN_P (p))
update_giv_derive (loop, p);
return p;
}
@ -5992,8 +5988,7 @@ check_final_value (const struct loop *loop, struct induction *v)
if (p == v->insn)
break;
if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN
|| GET_CODE (p) == CALL_INSN)
if (INSN_P (p))
{
/* It is possible for the BIV increment to use the GIV if we
have a cycle. Thus we must be sure to check each insn for
@ -6032,7 +6027,7 @@ check_final_value (const struct loop *loop, struct induction *v)
if (p == last_giv_use)
break;
if (GET_CODE (p) == JUMP_INSN && JUMP_LABEL (p)
if (JUMP_P (p) && JUMP_LABEL (p)
&& LABEL_NAME (JUMP_LABEL (p))
&& ((loop_insn_first_p (JUMP_LABEL (p), v->insn)
&& loop_insn_first_p (loop->start, JUMP_LABEL (p)))
@ -6110,7 +6105,7 @@ update_giv_derive (const struct loop *loop, rtx p)
for (bl = ivs->list; bl; bl = bl->next)
for (biv = bl->biv; biv; biv = biv->next_iv)
if (GET_CODE (p) == CODE_LABEL || GET_CODE (p) == JUMP_INSN
if (LABEL_P (p) || JUMP_P (p)
|| biv->insn == p)
{
/* Skip if location is the same as a previous one. */
@ -6126,7 +6121,7 @@ update_giv_derive (const struct loop *loop, rtx p)
/* If this giv is conditionally set and we have passed a label,
it cannot derive anything. */
if (GET_CODE (p) == CODE_LABEL && ! giv->always_computable)
if (LABEL_P (p) && ! giv->always_computable)
giv->cant_derive = 1;
/* Skip givs that have mult_val == 0, since
@ -6163,8 +6158,8 @@ update_giv_derive (const struct loop *loop, rtx p)
else
giv->cant_derive = 1;
}
else if ((GET_CODE (p) == CODE_LABEL && ! biv->always_computable)
|| (GET_CODE (p) == JUMP_INSN && biv->maybe_multiple))
else if ((LABEL_P (p) && ! biv->always_computable)
|| (JUMP_P (p) && biv->maybe_multiple))
giv->cant_derive = 1;
}
}
@ -6292,7 +6287,7 @@ basic_induction_var (const struct loop *loop, rtx x, enum machine_mode mode,
{
insn = PREV_INSN (insn);
}
while (insn && GET_CODE (insn) == NOTE
while (insn && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG);
if (!insn)
@ -6367,7 +6362,7 @@ basic_induction_var (const struct loop *loop, rtx x, enum machine_mode mode,
case ASHIFTRT:
/* Similar, since this can be a sign extension. */
for (insn = PREV_INSN (p);
(insn && GET_CODE (insn) == NOTE
(insn && NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG);
insn = PREV_INSN (insn))
;
@ -7932,7 +7927,7 @@ product_cheap_p (rtx a, rtx b)
rtx next = NEXT_INSN (tmp);
if (++n_insns > 3
|| GET_CODE (tmp) != INSN
|| !NONJUMP_INSN_P (tmp)
|| (GET_CODE (PATTERN (tmp)) == SET
&& GET_CODE (SET_SRC (PATTERN (tmp))) == MULT)
|| (GET_CODE (PATTERN (tmp)) == PARALLEL
@ -8024,7 +8019,7 @@ check_dbra_loop (struct loop *loop, int insn_count)
rtx jump1;
if ((jump1 = prev_nonnote_insn (first_compare)) != loop->cont)
if (GET_CODE (jump1) == JUMP_INSN)
if (JUMP_P (jump1))
return 0;
}
@ -8095,9 +8090,9 @@ check_dbra_loop (struct loop *loop, int insn_count)
the loop, then we can safely optimize. */
for (p = loop_start; p; p = PREV_INSN (p))
{
if (GET_CODE (p) == CODE_LABEL)
if (LABEL_P (p))
break;
if (GET_CODE (p) != JUMP_INSN)
if (!JUMP_P (p))
continue;
before_comparison = get_condition_for_loop (loop, p);
@ -8512,7 +8507,7 @@ check_dbra_loop (struct loop *loop, int insn_count)
emit_jump_insn_before (tem, loop_end);
for (tem = PREV_INSN (loop_end);
tem && GET_CODE (tem) != JUMP_INSN;
tem && !JUMP_P (tem);
tem = PREV_INSN (tem))
;
@ -8691,11 +8686,11 @@ loop_insn_first_p (rtx insn, rtx reference)
P is a note. */
if (INSN_UID (p) < max_uid_for_loop
&& INSN_UID (q) < max_uid_for_loop
&& GET_CODE (p) != NOTE)
&& !NOTE_P (p))
return INSN_LUID (p) <= INSN_LUID (q);
if (INSN_UID (p) >= max_uid_for_loop
|| GET_CODE (p) == NOTE)
|| NOTE_P (p))
p = NEXT_INSN (p);
if (INSN_UID (q) >= max_uid_for_loop)
q = NEXT_INSN (q);
@ -9110,7 +9105,7 @@ last_use_this_basic_block (rtx reg, rtx insn)
{
rtx n;
for (n = insn;
n && GET_CODE (n) != CODE_LABEL && GET_CODE (n) != JUMP_INSN;
n && !LABEL_P (n) && !JUMP_P (n);
n = NEXT_INSN (n))
{
if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (n))
@ -9246,7 +9241,7 @@ canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
if (op0 == cc0_rtx)
{
if ((prev = prev_nonnote_insn (prev)) == 0
|| GET_CODE (prev) != INSN
|| !NONJUMP_INSN_P (prev)
|| (set = single_set (prev)) == 0
|| SET_DEST (set) != cc0_rtx)
return 0;
@@ -9273,7 +9268,7 @@ canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
we don't want to bother dealing with it. */
if ((prev = prev_nonnote_insn (prev)) == 0
|| GET_CODE (prev) != INSN
|| !NONJUMP_INSN_P (prev)
|| FIND_REG_INC_NOTE (prev, NULL_RTX))
break;
@@ -9461,7 +9456,7 @@ get_condition (rtx jump, rtx *earliest, int allow_cc_mode)
rtx set;
/* If this is not a standard conditional jump, we can't parse it. */
if (GET_CODE (jump) != JUMP_INSN
if (!JUMP_P (jump)
|| ! any_condjump_p (jump))
return 0;
set = pc_set (jump);
@@ -9665,13 +9660,13 @@ loop_regs_scan (const struct loop *loop, int extra_size)
}
}
if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN)
if (LABEL_P (insn) || JUMP_P (insn))
memset (last_set, 0, regs->num * sizeof (rtx));
/* Invalidate all registers used for function argument passing.
We check rtx_varies_p for the same reason as below, to allow
optimizing PIC calculations. */
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
rtx link;
for (link = CALL_INSN_FUNCTION_USAGE (insn);
@@ -9753,7 +9748,7 @@ load_mems (const struct loop *loop)
/* We cannot use next_label here because it skips over normal insns. */
end_label = next_nonnote_insn (loop->end);
if (end_label && GET_CODE (end_label) != CODE_LABEL)
if (end_label && !LABEL_P (end_label))
end_label = NULL_RTX;
/* Check to see if it's possible that some instructions in the loop are
@@ -9763,16 +9758,16 @@ load_mems (const struct loop *loop)
p != NULL_RTX;
p = next_insn_in_loop (loop, p))
{
if (GET_CODE (p) == CODE_LABEL)
if (LABEL_P (p))
maybe_never = 1;
else if (GET_CODE (p) == JUMP_INSN
else if (JUMP_P (p)
/* If we enter the loop in the middle, and scan
around to the beginning, don't set maybe_never
for that. This must be an unconditional jump,
otherwise the code at the top of the loop might
never be executed. Unconditional jumps are
followed by a barrier then loop end. */
&& ! (GET_CODE (p) == JUMP_INSN
&& ! (JUMP_P (p)
&& JUMP_LABEL (p) == loop->top
&& NEXT_INSN (NEXT_INSN (p)) == loop->end
&& any_uncondjump_p (p)))
@@ -9803,7 +9798,7 @@ load_mems (const struct loop *loop)
/* Find start of the extended basic block that enters the loop. */
for (p = loop->start;
PREV_INSN (p) && GET_CODE (p) != CODE_LABEL;
PREV_INSN (p) && !LABEL_P (p);
p = PREV_INSN (p))
;
prev_ebb_head = p;
@@ -9938,7 +9933,7 @@ load_mems (const struct loop *loop)
/* If this is a call which uses / clobbers this memory
location, we must not change the interface here. */
if (GET_CODE (p) == CALL_INSN
if (CALL_P (p)
&& reg_mentioned_p (loop_info->mems[i].mem,
CALL_INSN_FUNCTION_USAGE (p)))
{
@@ -9952,8 +9947,8 @@ load_mems (const struct loop *loop)
loop_info->mems[i].reg, written);
}
if (GET_CODE (p) == CODE_LABEL
|| GET_CODE (p) == JUMP_INSN)
if (LABEL_P (p)
|| JUMP_P (p))
maybe_never = 1;
}
@@ -10068,7 +10063,7 @@ load_mems (const struct loop *loop)
label with the new one. */
if (label != NULL_RTX && end_label != NULL_RTX)
for (p = loop->start; p != loop->end; p = NEXT_INSN (p))
if (GET_CODE (p) == JUMP_INSN && JUMP_LABEL (p) == end_label)
if (JUMP_P (p) && JUMP_LABEL (p) == end_label)
redirect_jump (p, label, false);
cselib_finish ();
@@ -10115,7 +10110,7 @@ try_copy_prop (const struct loop *loop, rtx replacement, unsigned int regno)
/* Only substitute within one extended basic block from the initializing
insn. */
if (GET_CODE (insn) == CODE_LABEL && init_insn)
if (LABEL_P (insn) && init_insn)
break;
if (! INSN_P (insn))
@@ -10719,7 +10714,7 @@ debug_giv (const struct induction *v)
/* The notes do not have an assigned block, so look at the next insn. */
#define LOOP_BLOCK_NUM(INSN) \
((INSN) ? (GET_CODE (INSN) == NOTE \
((INSN) ? (NOTE_P (INSN) \
? LOOP_BLOCK_NUM_1 (next_nonnote_insn (INSN)) \
: LOOP_BLOCK_NUM_1 (INSN)) \
: -1)
@@ -10738,13 +10733,13 @@ loop_dump_aux (const struct loop *loop, FILE *file,
/* Print diagnostics to compare our concept of a loop with
what the loop notes say. */
if (! PREV_INSN (BB_HEAD (loop->first))
|| GET_CODE (PREV_INSN (BB_HEAD (loop->first))) != NOTE
|| !NOTE_P (PREV_INSN (BB_HEAD (loop->first)))
|| NOTE_LINE_NUMBER (PREV_INSN (BB_HEAD (loop->first)))
!= NOTE_INSN_LOOP_BEG)
fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
INSN_UID (PREV_INSN (BB_HEAD (loop->first))));
if (! NEXT_INSN (BB_END (loop->last))
|| GET_CODE (NEXT_INSN (BB_END (loop->last))) != NOTE
|| !NOTE_P (NEXT_INSN (BB_END (loop->last)))
|| NOTE_LINE_NUMBER (NEXT_INSN (BB_END (loop->last)))
!= NOTE_INSN_LOOP_END)
fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
INSN_UID (NEXT_INSN (BB_END (loop->last))));
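
Every hunk in this patch is the same mechanical rewrite: an open-coded
GET_CODE comparison against one of the rtx insn codes becomes the matching
predicate macro. For reference, here is a sketch of the rtl.h predicates as
of this patch (reproduced from memory rather than quoted from the tree, so
treat the exact layout as an assumption):

/* Sketch of the rtl.h predicate macros.  Each expands to exactly the
   GET_CODE test it replaces, so a one-for-one substitution is
   behavior-preserving.  */
#define NOTE_P(X)          (GET_CODE (X) == NOTE)
#define BARRIER_P(X)       (GET_CODE (X) == BARRIER)
#define LABEL_P(X)         (GET_CODE (X) == CODE_LABEL)
#define JUMP_P(X)          (GET_CODE (X) == JUMP_INSN)
#define CALL_P(X)          (GET_CODE (X) == CALL_INSN)
#define NONJUMP_INSN_P(X)  (GET_CODE (X) == INSN)
/* A "real" insn: an INSN, a JUMP_INSN, or a CALL_INSN.  */
#define INSN_P(X)          (NONJUMP_INSN_P (X) || JUMP_P (X) || CALL_P (X))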


@@ -265,7 +265,7 @@ doloop_register_get (rtx insn, rtx *comp)
{
rtx pattern, cmp, inc, reg, condition;
if (GET_CODE (insn) != JUMP_INSN)
if (!JUMP_P (insn))
return NULL_RTX;
pattern = PATTERN (insn);
@@ -789,7 +789,7 @@ static rtx
find_line_note (rtx insn)
{
for (; insn; insn = PREV_INSN (insn))
if (GET_CODE (insn) == NOTE
if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) >= 0)
break;
@@ -938,9 +938,9 @@ sms_schedule (FILE *dump_file)
/* Don't handle BBs with calls or barriers, or !single_set insns. */
for (insn = head; insn != NEXT_INSN (tail); insn = NEXT_INSN (insn))
if (GET_CODE (insn) == CALL_INSN
|| GET_CODE (insn) == BARRIER
|| (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN
if (CALL_P (insn)
|| BARRIER_P (insn)
|| (INSN_P (insn) && !JUMP_P (insn)
&& !single_set (insn) && GET_CODE (PATTERN (insn)) != USE))
break;
@@ -948,9 +948,9 @@ sms_schedule (FILE *dump_file)
{
if (stats_file)
{
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
fprintf (stats_file, "SMS loop-with-call\n");
else if (GET_CODE (insn) == BARRIER)
else if (BARRIER_P (insn))
fprintf (stats_file, "SMS loop-with-barrier\n");
else
fprintf (stats_file, "SMS loop-with-not-single-set\n");
@@ -1245,7 +1245,7 @@ sms_schedule_by_order (ddg_ptr g, int mii, int maxii, int *nodes_order, FILE *dump_file)
if (!INSN_P (insn))
continue;
if (GET_CODE (insn) == JUMP_INSN) /* Closing branch handled later. */
if (JUMP_P (insn)) /* Closing branch handled later. */
continue;
/* 1. compute sched window for u (start, end, step). */


@@ -3292,7 +3292,7 @@ emit_no_conflict_block (rtx insns, rtx target, rtx op0, rtx op1, rtx equiv)
return emit_insn (insns);
else
for (insn = insns; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) != INSN
if (!NONJUMP_INSN_P (insn)
|| find_reg_note (insn, REG_LIBCALL, NULL_RTX))
return emit_insn (insns);
@@ -3436,7 +3436,7 @@ emit_libcall_block (rtx insns, rtx target, rtx result, rtx equiv)
if (flag_non_call_exceptions && may_trap_p (equiv))
{
for (insn = insns; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
@@ -3450,7 +3450,7 @@ emit_libcall_block (rtx insns, rtx target, rtx result, rtx equiv)
goto (unless there is already a REG_EH_REGION note, in which case
we update it). */
for (insn = insns; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
@@ -3503,7 +3503,7 @@ emit_libcall_block (rtx insns, rtx target, rtx result, rtx equiv)
/* Some ports use a loop to copy large arguments onto the stack.
Don't move anything outside such a loop. */
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
break;
}


@@ -396,7 +396,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
/* cselib blows up on CODE_LABELs. Trying to fix that doesn't seem
right, so avoid the problem here. Likewise if we have a constant
and the insn pattern doesn't tell us the mode we need. */
if (GET_CODE (recog_data.operand[i]) == CODE_LABEL
if (LABEL_P (recog_data.operand[i])
|| (CONSTANT_P (recog_data.operand[i])
&& recog_data.operand_mode[i] == VOIDmode))
continue;
@@ -721,7 +721,7 @@ reload_combine (void)
FOR_EACH_BB_REVERSE (bb)
{
insn = BB_HEAD (bb);
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
{
HARD_REG_SET live;
@@ -752,9 +752,9 @@ reload_combine (void)
/* We cannot do our optimization across labels. Invalidating all the use
information we have would be costly, so we just note where the label
is and then later disable any optimization that would cross it. */
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
last_label_ruid = reload_combine_ruid;
else if (GET_CODE (insn) == BARRIER)
else if (BARRIER_P (insn))
for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
if (! fixed_regs[r])
reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
@@ -898,7 +898,7 @@ reload_combine (void)
note_stores (PATTERN (insn), reload_combine_note_store, NULL);
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
rtx link;
@@ -932,7 +932,7 @@ reload_combine (void)
}
}
else if (GET_CODE (insn) == JUMP_INSN
else if (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) != RETURN)
{
/* Non-spill registers might be used at the call destination in
@@ -1192,7 +1192,7 @@ reload_cse_move2add (rtx first)
{
rtx pat, note;
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
{
move2add_last_label_luid = move2add_luid;
/* We're going to increment move2add_luid twice after a
@@ -1398,7 +1398,7 @@ reload_cse_move2add (rtx first)
/* If this is a CALL_INSN, all call used registers are stored with
unknown values. */
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
{


@@ -247,7 +247,7 @@ tree_predict_edge (edge e, enum br_predictor predictor, int probability)
static bool
can_predict_insn_p (rtx insn)
{
return (GET_CODE (insn) == JUMP_INSN
return (JUMP_P (insn)
&& any_condjump_p (insn)
&& BLOCK_FOR_INSN (insn)->succ->succ_next);
}
@@ -672,7 +672,7 @@ estimate_probability (struct loops *loops_info)
messages. */
for (insn = BB_HEAD (e->dest); insn != NEXT_INSN (BB_END (e->dest));
insn = NEXT_INSN (insn))
if (GET_CODE (insn) == CALL_INSN
if (CALL_P (insn)
/* Constant and pure calls are hardly used to signalize
something exceptional. */
&& ! CONST_OR_PURE_CALL_P (insn))
@@ -774,7 +774,7 @@ estimate_probability (struct loops *loops_info)
/* Attach the combined probability to each conditional jump. */
FOR_EACH_BB (bb)
if (GET_CODE (BB_END (bb)) == JUMP_INSN
if (JUMP_P (BB_END (bb))
&& any_condjump_p (BB_END (bb))
&& bb->succ->succ_next != NULL)
combine_predictions_for_insn (BB_END (bb), bb);
@@ -1023,7 +1023,7 @@ expected_value_to_br_prob (void)
case JUMP_INSN:
/* Look for simple conditional branches. If we haven't got an
expected value yet, no point going further. */
if (GET_CODE (insn) != JUMP_INSN || ev == NULL_RTX
if (!JUMP_P (insn) || ev == NULL_RTX
|| ! any_condjump_p (insn))
continue;
break;
@@ -1155,7 +1155,7 @@ process_note_predictions (basic_block bb, int *heads)
for (insn = BB_END (bb); insn;
was_bb_head |= (insn == BB_HEAD (bb)), insn = PREV_INSN (insn))
{
if (GET_CODE (insn) != NOTE)
if (!NOTE_P (insn))
{
if (was_bb_head)
break;
@@ -1163,7 +1163,7 @@ process_note_predictions (basic_block bb, int *heads)
{
/* Noreturn calls cause program to exit, therefore they are
always predicted as not taken. */
if (GET_CODE (insn) == CALL_INSN
if (CALL_P (insn)
&& find_reg_note (insn, REG_NORETURN, NULL))
contained_noreturn_call = 1;
continue;


@@ -124,8 +124,8 @@ print_rtx (rtx in_rtx)
/* When printing in VCG format we write INSNs, NOTE, LABEL, and BARRIER
in separate nodes and therefore have to handle them specially here. */
if (dump_for_graph
&& (is_insn || GET_CODE (in_rtx) == NOTE
|| GET_CODE (in_rtx) == CODE_LABEL || GET_CODE (in_rtx) == BARRIER))
&& (is_insn || NOTE_P (in_rtx)
|| LABEL_P (in_rtx) || BARRIER_P (in_rtx)))
{
i = 3;
indent = 0;
@@ -231,7 +231,7 @@ print_rtx (rtx in_rtx)
print_node_brief (outfile, "", decl, 0);
}
#endif
else if (i == 4 && GET_CODE (in_rtx) == NOTE)
else if (i == 4 && NOTE_P (in_rtx))
{
switch (NOTE_LINE_NUMBER (in_rtx))
{
@@ -385,7 +385,7 @@ print_rtx (rtx in_rtx)
fprintf(outfile, " %s:%i", insn_file (in_rtx), insn_line (in_rtx));
#endif
}
else if (i == 6 && GET_CODE (in_rtx) == NOTE)
else if (i == 6 && NOTE_P (in_rtx))
{
/* This field is only used for NOTE_INSN_DELETED_LABEL, and
other times often contains garbage from INSN->NOTE death. */
@@ -421,7 +421,7 @@ print_rtx (rtx in_rtx)
else
#endif
if (flag_dump_unnumbered
&& (is_insn || GET_CODE (in_rtx) == NOTE))
&& (is_insn || NOTE_P (in_rtx)))
fputc ('#', outfile);
else
fprintf (outfile, " %d", value);
@@ -577,8 +577,8 @@ print_rtx (rtx in_rtx)
}
if (dump_for_graph
&& (is_insn || GET_CODE (in_rtx) == NOTE
|| GET_CODE (in_rtx) == CODE_LABEL || GET_CODE (in_rtx) == BARRIER))
&& (is_insn || NOTE_P (in_rtx)
|| LABEL_P (in_rtx) || BARRIER_P (in_rtx)))
sawclose = 0;
else
{
@@ -717,7 +717,7 @@ print_rtl (FILE *outf, rtx rtx_first)
case BARRIER:
for (tmp_rtx = rtx_first; tmp_rtx != 0; tmp_rtx = NEXT_INSN (tmp_rtx))
if (! flag_dump_unnumbered
|| GET_CODE (tmp_rtx) != NOTE || NOTE_LINE_NUMBER (tmp_rtx) < 0)
|| !NOTE_P (tmp_rtx) || NOTE_LINE_NUMBER (tmp_rtx) < 0)
{
fputs (print_rtx_head, outfile);
print_rtx (tmp_rtx);
@@ -740,7 +740,7 @@ print_rtl_single (FILE *outf, rtx x)
outfile = outf;
sawclose = 0;
if (! flag_dump_unnumbered
|| GET_CODE (x) != NOTE || NOTE_LINE_NUMBER (x) < 0)
|| !NOTE_P (x) || NOTE_LINE_NUMBER (x) < 0)
{
fputs (print_rtx_head, outfile);
print_rtx (x);


@@ -907,7 +907,7 @@ branch_prob (void)
while (insn != BB_END (bb))
{
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
{
/* Must ignore the line number notes that
immediately follow the end of an inline function


@@ -711,7 +711,7 @@ live_out_1 (struct df *df ATTRIBUTE_UNUSED, struct curr_use *use, rtx insn)
/* We want to access the root webpart. */
wp = find_web_part (wp);
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
wp->crosses_call = 1;
else if (copy_insn_p (insn, &s, NULL))
source_regno = REGNO (GET_CODE (s) == SUBREG ? SUBREG_REG (s) : s);
@@ -1045,7 +1045,7 @@ livethrough_conflicts_bb (basic_block bb)
bitmap_set_bit (all_defs, DF_REF_ID (info.defs[n]));
if (TEST_BIT (insns_with_deaths, INSN_UID (insn)))
deaths++;
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
contains_call = 1;
}
if (insn == BB_END (bb))
@@ -2676,7 +2676,7 @@ detect_webs_set_in_cond_jump (void)
{
basic_block bb;
FOR_EACH_BB (bb)
if (GET_CODE (BB_END (bb)) == JUMP_INSN)
if (JUMP_P (BB_END (bb)))
{
struct df_link *link;
for (link = DF_INSN_DEFS (df, BB_END (bb)); link; link = link->next)


@@ -317,10 +317,10 @@ ra_print_rtx_object (FILE *file, rtx x)
case LABEL_REF:
{
rtx sub = XEXP (x, 0);
if (GET_CODE (sub) == NOTE
if (NOTE_P (sub)
&& NOTE_LINE_NUMBER (sub) == NOTE_INSN_DELETED_LABEL)
fprintf (file, "(deleted uid=%d)", INSN_UID (sub));
else if (GET_CODE (sub) == CODE_LABEL)
else if (LABEL_P (sub))
fprintf (file, "L%d", CODE_LABEL_NUMBER (sub));
else
fprintf (file, "(nonlabel uid=%d)", INSN_UID (sub));
@@ -566,7 +566,7 @@ ra_debug_insns (rtx insn, int num)
insn = PREV_INSN (insn);
for (i = count; i > 0 && insn; insn = NEXT_INSN (insn), i--)
{
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
fprintf (stderr, "\n");
ra_print_rtx_top (stderr, insn, (i == count || i == 1));
}
@@ -586,7 +586,7 @@ ra_print_rtl_with_bb (FILE *file, rtx insn)
last_bb = NULL;
for (; insn; insn = NEXT_INSN (insn))
{
if (GET_CODE (insn) == BARRIER)
if (BARRIER_P (insn))
bb = NULL;
else
bb = BLOCK_FOR_INSN (insn);
@@ -598,9 +598,9 @@ ra_print_rtl_with_bb (FILE *file, rtx insn)
fprintf (file, ";; Begin of basic block %d\n", bb->index);
last_bb = bb;
}
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
fputc ('\n', file);
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
{
/* Ignore basic block and maybe other notes not referencing
deleted things. */


@@ -637,7 +637,7 @@ insert_stores (bitmap new_deaths)
/* If we reach a basic block border, which has more than one
outgoing edge, we simply forget all already emitted stores. */
if (GET_CODE (insn) == BARRIER
if (BARRIER_P (insn)
|| JUMP_P (insn) || can_throw_internal (insn))
{
last_slot = NULL_RTX;
@@ -1252,7 +1252,7 @@ rewrite_program2 (bitmap new_deaths)
XXX Note, that sometimes reload barfs when we emit insns between
a call and the insn which copies the return register into a
pseudo. */
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
ri.need_load = 1;
else if (INSN_P (insn))
for (n = 0; n < info.num_uses; n++)
@@ -1339,7 +1339,7 @@ rewrite_program2 (bitmap new_deaths)
web->one_load = 0;
}
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
break;
}


@@ -715,9 +715,7 @@ next_insn_tests_no_inequality (rtx insn)
if (next == 0)
return 0;
return ((GET_CODE (next) == JUMP_INSN
|| GET_CODE (next) == INSN
|| GET_CODE (next) == CALL_INSN)
return (INSN_P (next)
&& ! inequality_comparisons_p (PATTERN (next)));
}
#endif
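
A few hunks, like the next_insn_tests_no_inequality change just above, do
slightly more than rename: a three-way GET_CODE disjunction over JUMP_INSN,
INSN, and CALL_INSN collapses into a single INSN_P test. Under the
definitions sketched earlier the two forms accept exactly the same rtx
codes; a minimal illustration (the helper names here are hypothetical, not
part of the patch):

/* Hypothetical helpers showing the equivalence the collapsed hunks rely
   on; both accept exactly INSN, JUMP_INSN, and CALL_INSN.  */
static int
is_real_insn_old (rtx x)
{
  return (GET_CODE (x) == JUMP_INSN
          || GET_CODE (x) == INSN
          || GET_CODE (x) == CALL_INSN);
}

static int
is_real_insn_new (rtx x)
{
  return INSN_P (x);
}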
@@ -850,7 +848,7 @@ find_single_use (rtx dest, rtx insn, rtx *ploc)
{
next = NEXT_INSN (insn);
if (next == 0
|| (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
|| (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
return 0;
result = find_single_use_1 (dest, &PATTERN (next));
@@ -864,7 +862,7 @@ find_single_use (rtx dest, rtx insn, rtx *ploc)
return 0;
for (next = next_nonnote_insn (insn);
next != 0 && GET_CODE (next) != CODE_LABEL;
next != 0 && !LABEL_P (next);
next = next_nonnote_insn (next))
if (INSN_P (next) && dead_or_set_p (next, dest))
{
@@ -2740,7 +2738,7 @@ split_all_insns (int upd_life)
BB boundary we are interested in will be set to
previous one. */
while (GET_CODE (last) == BARRIER)
while (BARRIER_P (last))
last = PREV_INSN (last);
SET_BIT (blocks, bb->index);
changed = true;
@@ -3077,14 +3075,14 @@ peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
if (j >= MAX_INSNS_PER_PEEP2 + 1)
j -= MAX_INSNS_PER_PEEP2 + 1;
old_insn = peep2_insn_data[j].insn;
if (GET_CODE (old_insn) != CALL_INSN)
if (!CALL_P (old_insn))
continue;
was_call = true;
new_insn = try;
while (new_insn != NULL_RTX)
{
if (GET_CODE (new_insn) == CALL_INSN)
if (CALL_P (new_insn))
break;
new_insn = NEXT_INSN (new_insn);
}
@@ -3119,7 +3117,7 @@ peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
if (j >= MAX_INSNS_PER_PEEP2 + 1)
j -= MAX_INSNS_PER_PEEP2 + 1;
old_insn = peep2_insn_data[j].insn;
if (GET_CODE (old_insn) == CALL_INSN)
if (CALL_P (old_insn))
abort ();
}
break;
@@ -3149,7 +3147,7 @@ peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
break;
for (x = try ; x != before_try ; x = PREV_INSN (x))
if (GET_CODE (x) == CALL_INSN
if (CALL_P (x)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (x))
&& !find_reg_note (x, REG_EH_REGION, NULL)))
@@ -3168,7 +3166,7 @@ peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
nfte = split_block (bb, x);
flags = (eh_edge->flags
& (EDGE_EH | EDGE_ABNORMAL));
if (GET_CODE (x) == CALL_INSN)
if (CALL_P (x))
flags |= EDGE_ABNORMAL_CALL;
nehe = make_edge (nfte->src, eh_edge->dest,
flags);
@@ -3236,7 +3234,7 @@ peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
/* If we generated a jump instruction, it won't have
JUMP_LABEL set. Recompute after we're done. */
for (x = try; x != before_try; x = PREV_INSN (x))
if (GET_CODE (x) == JUMP_INSN)
if (JUMP_P (x))
{
do_rebuild_jump_labels = true;
break;
@@ -3342,7 +3340,7 @@ if_test_bypass_p (rtx out_insn, rtx in_insn)
in_set = single_set (in_insn);
if (! in_set)
{
if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
if (JUMP_P (in_insn) || CALL_P (in_insn))
return false;
abort ();
}


@@ -344,7 +344,7 @@ next_flags_user (rtx insn)
if (INSN_P (insn) && reg_mentioned_p (ix86_flags_rtx, PATTERN (insn)))
return insn;
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
return NULL_RTX;
}
return NULL_RTX;
@@ -506,7 +506,7 @@ record_label_references (rtx insn, rtx pat)
rtx label = XEXP (pat, 0);
rtx ref;
if (GET_CODE (label) != CODE_LABEL)
if (!LABEL_P (label))
abort ();
/* If this is an undefined label, LABEL_REFS (label) contains
@@ -986,10 +986,10 @@ emit_swap_insn (rtx insn, stack regstack, rtx reg)
rtx limit = PREV_INSN (BB_HEAD (current_block));
while (tmp != limit)
{
if (GET_CODE (tmp) == CODE_LABEL
|| GET_CODE (tmp) == CALL_INSN
if (LABEL_P (tmp)
|| CALL_P (tmp)
|| NOTE_INSN_BASIC_BLOCK_P (tmp)
|| (GET_CODE (tmp) == INSN
|| (NONJUMP_INSN_P (tmp)
&& stack_regs_mentioned (tmp)))
{
i1 = tmp;
@@ -1292,7 +1292,7 @@ swap_rtx_condition (rtx insn)
insn = NEXT_INSN (insn);
if (INSN_P (insn) && reg_mentioned_p (dest, insn))
break;
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
return 0;
}
@@ -2326,7 +2326,7 @@ subst_stack_regs (rtx insn, stack regstack)
bool control_flow_insn_deleted = false;
int i;
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
int top = regstack->top;
@@ -2388,7 +2388,7 @@ subst_stack_regs (rtx insn, stack regstack)
/* subst_stack_regs_pat may have deleted a no-op insn. If so, any
REG_UNUSED will already have been dealt with, so just return. */
if (GET_CODE (insn) == NOTE || INSN_DELETED_P (insn))
if (NOTE_P (insn) || INSN_DELETED_P (insn))
return control_flow_insn_deleted;
/* If there is a REG_UNUSED note on a stack register on this insn,
@@ -2748,7 +2748,7 @@ compensate_edge (edge e, FILE *file)
tmpstack = regstack;
change_stack (BB_END (block), &tmpstack, target_stack,
(GET_CODE (BB_END (block)) == JUMP_INSN
(JUMP_P (BB_END (block))
? EMIT_BEFORE : EMIT_AFTER));
}
else
@@ -2870,7 +2870,7 @@ convert_regs_1 (FILE *file, basic_block block)
/* Don't bother processing unless there is a stack reg
mentioned or if it's a CALL_INSN. */
if (stack_regs_mentioned (insn)
|| GET_CODE (insn) == CALL_INSN)
|| CALL_P (insn))
{
if (file)
{
@@ -2894,7 +2894,7 @@ convert_regs_1 (FILE *file, basic_block block)
}
insn = BB_END (block);
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
insn = PREV_INSN (insn);
/* If the function is declared to return a value, but it returns one


@@ -1087,7 +1087,7 @@ scan_one_insn (rtx insn, int pass)
We need not check for code_label here;
while a basic block can start with a code_label,
INSN could not be at the beginning of that block. */
if (previnsn == 0 || GET_CODE (previnsn) == JUMP_INSN)
if (previnsn == 0 || JUMP_P (previnsn))
{
basic_block b;
FOR_EACH_BB (b)


@@ -491,7 +491,7 @@ optimize_reg_copy_1 (rtx insn, rtx dest, rtx src)
/* If the insn in which SRC dies is a CALL_INSN, don't count it
as a call that has been crossed. Otherwise, count it. */
if (q != p && GET_CODE (q) == CALL_INSN)
if (q != p && CALL_P (q))
{
/* Similarly, total calls for SREGNO, total calls beyond
the death note for DREGNO. */
@@ -620,7 +620,7 @@ optimize_reg_copy_2 (rtx insn, rtx dest, rtx src)
PATTERN (q) = replace_rtx (PATTERN (q), dest, src);
if (GET_CODE (q) == CALL_INSN)
if (CALL_P (q))
{
REG_N_CALLS_CROSSED (dregno)--;
REG_N_CALLS_CROSSED (sregno)++;
@@ -636,7 +636,7 @@ optimize_reg_copy_2 (rtx insn, rtx dest, rtx src)
if (reg_set_p (src, p)
|| find_reg_note (p, REG_DEAD, dest)
|| (GET_CODE (p) == CALL_INSN && REG_N_CALLS_CROSSED (sregno) == 0))
|| (CALL_P (p) && REG_N_CALLS_CROSSED (sregno) == 0))
break;
}
}
@@ -971,8 +971,8 @@ fixup_match_2 (rtx insn, rtx dst, rtx src, rtx offset, FILE *regmove_dump_file)
#ifdef AUTO_INC_DEC
for (p = PREV_INSN (insn); p; p = PREV_INSN (p))
{
if (GET_CODE (p) == CODE_LABEL
|| GET_CODE (p) == JUMP_INSN)
if (LABEL_P (p)
|| JUMP_P (p))
break;
if (! INSN_P (p))
continue;
@@ -985,8 +985,8 @@ fixup_match_2 (rtx insn, rtx dst, rtx src, rtx offset, FILE *regmove_dump_file)
}
for (p = NEXT_INSN (insn); p; p = NEXT_INSN (p))
{
if (GET_CODE (p) == CODE_LABEL
|| GET_CODE (p) == JUMP_INSN)
if (LABEL_P (p)
|| JUMP_P (p))
break;
if (! INSN_P (p))
continue;
@@ -1010,7 +1010,7 @@ fixup_match_2 (rtx insn, rtx dst, rtx src, rtx offset, FILE *regmove_dump_file)
/* reg_set_p is overly conservative for CALL_INSNS, thinks that all
hard regs are clobbered. Thus, we only use it for src for
non-call insns. */
if (GET_CODE (p) == CALL_INSN)
if (CALL_P (p))
{
if (! dst_death)
num_calls++;
@@ -1423,7 +1423,7 @@ regmove_optimize (rtx f, int nregs, FILE *regmove_dump_file)
/* If we have passed a call instruction, and the
pseudo-reg DST is not already live across a call,
then don't perform the optimization. */
if (GET_CODE (p) == CALL_INSN)
if (CALL_P (p))
{
num_calls++;
@@ -1702,7 +1702,7 @@ fixup_match_1 (rtx insn, rtx set, rtx src, rtx src_subreg, rtx dst,
for (length = s_length = 0, p = NEXT_INSN (insn); p; p = NEXT_INSN (p))
{
if (GET_CODE (p) == CALL_INSN)
if (CALL_P (p))
replace_in_call_usage (& CALL_INSN_FUNCTION_USAGE (p),
REGNO (dst), src, p);
@@ -1839,7 +1839,7 @@ fixup_match_1 (rtx insn, rtx set, rtx src, rtx src_subreg, rtx dst,
/* If we have passed a call instruction, and the pseudo-reg SRC is not
already live across a call, then don't perform the optimization. */
if (GET_CODE (p) == CALL_INSN)
if (CALL_P (p))
{
if (REG_N_CALLS_CROSSED (REGNO (src)) == 0)
break;
@@ -1930,7 +1930,7 @@ fixup_match_1 (rtx insn, rtx set, rtx src, rtx src_subreg, rtx dst,
q = 0;
break;
}
if (GET_CODE (p) == CALL_INSN)
if (CALL_P (p))
num_calls2++;
}
if (q && set2 && SET_DEST (set2) == src && CONSTANT_P (SET_SRC (set2))
@ -2159,7 +2159,7 @@ single_set_for_csa (rtx insn)
if (tmp)
return tmp;
if (GET_CODE (insn) != INSN
if (!NONJUMP_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) != PARALLEL)
return NULL_RTX;
@@ -2441,7 +2441,7 @@ combine_stack_adjustments_for_block (basic_block bb)
data.insn = insn;
data.memlist = memlist;
if (GET_CODE (insn) != CALL_INSN && last_sp_set
if (!CALL_P (insn) && last_sp_set
&& !for_each_rtx (&PATTERN (insn), record_stack_memrefs, &data))
{
memlist = data.memlist;
@@ -2452,7 +2452,7 @@ combine_stack_adjustments_for_block (basic_block bb)
/* Otherwise, we were not able to process the instruction.
Do not continue collecting data across such a one. */
if (last_sp_set
&& (GET_CODE (insn) == CALL_INSN
&& (CALL_P (insn)
|| reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
{
if (last_sp_set && last_sp_adjust == 0)


@@ -824,7 +824,7 @@ build_def_use (basic_block bb)
*recog_data.operand_loc[i] = old_operands[i];
/* Step 2B: Can't rename function call argument registers. */
if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
if (CALL_P (insn) && CALL_INSN_FUNCTION_USAGE (insn))
scan_rtx (insn, &CALL_INSN_FUNCTION_USAGE (insn),
NO_REGS, terminate_all_read, OP_IN, 0);
@@ -879,7 +879,7 @@ build_def_use (basic_block bb)
/* Step 4B: If this is a call, any chain live at this point
requires a caller-saved reg. */
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
struct du_chain *p;
for (p = open_chains; p; p = p->next_chain)
@@ -934,7 +934,7 @@ build_def_use (basic_block bb)
recog_op_alt[i][alt].earlyclobber);
}
}
else if (GET_CODE (insn) != CALL_INSN)
else if (!CALL_P (insn))
for (i = 0; i < n_ops + recog_data.n_dups; i++)
{
int opn = i < n_ops ? i : recog_data.dup_num[i - n_ops];
@@ -1702,7 +1702,7 @@ copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
did_replacement:
/* Clobber call-clobbered registers. */
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
kill_value_regno (i, vd);


@@ -2536,7 +2536,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
/* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
neither are insns that SET cc0. Insns that use CC0 are not allowed
to have any input reloads. */
if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CALL_INSN)
if (JUMP_P (insn) || CALL_P (insn))
no_output_reloads = 1;
#ifdef HAVE_cc0
@@ -4020,7 +4020,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
/* If we're replacing an operand with a LABEL_REF, we need
to make sure that there's a REG_LABEL note attached to
this instruction. */
if (GET_CODE (insn) != JUMP_INSN
if (!JUMP_P (insn)
&& GET_CODE (substitution) == LABEL_REF
&& !find_reg_note (insn, REG_LABEL, XEXP (substitution, 0)))
REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL,
@@ -5528,7 +5528,7 @@ find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
? XEXP (x, 0)
: reg_equiv_mem[regno]);
int icode = (int) add_optab->handlers[(int) Pmode].insn_code;
if (insn && GET_CODE (insn) == INSN && equiv
if (insn && NONJUMP_INSN_P (insn) && equiv
&& memory_operand (equiv, GET_MODE (equiv))
#ifdef HAVE_cc0
&& ! sets_cc0_p (PATTERN (insn))
@@ -5954,7 +5954,7 @@ subst_reloads (rtx insn)
REG_LABEL note to indicate to flow which label this
register refers to. */
if (GET_CODE (*r->where) == LABEL_REF
&& GET_CODE (insn) == JUMP_INSN)
&& JUMP_P (insn))
REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL,
XEXP (*r->where, 0),
REG_NOTES (insn));
@@ -6460,11 +6460,11 @@ find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
{
p = PREV_INSN (p);
num++;
if (p == 0 || GET_CODE (p) == CODE_LABEL
if (p == 0 || LABEL_P (p)
|| num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
return 0;
if (GET_CODE (p) == INSN
if (NONJUMP_INSN_P (p)
/* If we don't want spill regs ... */
&& (! (reload_reg_p != 0
&& reload_reg_p != (short *) (HOST_WIDE_INT) 1)
@@ -6655,7 +6655,7 @@ find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
/* Don't trust the conversion past a function call
if either of the two is in a call-clobbered register, or memory. */
if (GET_CODE (p) == CALL_INSN)
if (CALL_P (p))
{
int i;
@@ -6778,7 +6778,7 @@ find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
}
}
if (GET_CODE (p) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (p))
if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
{
rtx link;


@@ -842,8 +842,7 @@ reload (rtx first, int global)
main reload loop in the most common case where register elimination
cannot be done. */
for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
|| GET_CODE (insn) == CALL_INSN)
if (INSN_P (insn))
note_stores (PATTERN (insn), mark_not_eliminable, NULL);
maybe_fix_stack_asms ();
@@ -1053,7 +1052,7 @@ reload (rtx first, int global)
if an insn has a variable address, gets a REG_EH_REGION
note added to it, and then gets converted into an load
from a constant address. */
if (GET_CODE (equiv_insn) == NOTE
if (NOTE_P (equiv_insn)
|| can_throw_internal (equiv_insn))
;
else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
@@ -1162,7 +1161,7 @@ reload (rtx first, int global)
{
rtx *pnote;
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
@@ -1436,7 +1435,7 @@ calculate_needs_all_insns (int global)
include REG_LABEL), we need to see what effects this has on the
known offsets at labels. */
if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
if (LABEL_P (insn) || JUMP_P (insn)
|| (INSN_P (insn) && REG_NOTES (insn) != 0))
set_label_offsets (insn, insn, 0);
@@ -2130,7 +2129,7 @@ set_label_offsets (rtx x, rtx insn, int initial_p)
else if (x == insn
&& (tem = prev_nonnote_insn (insn)) != 0
&& GET_CODE (tem) == BARRIER)
&& BARRIER_P (tem))
set_offsets_for_label (insn);
else
/* If neither of the above cases is true, compare each offset
@@ -3788,7 +3787,7 @@ reload_as_needed (int live_known)
/* If we pass a label, copy the offsets from the label information
into the current offsets of each elimination. */
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
set_offsets_for_label (insn);
else if (INSN_P (insn))
@@ -3811,7 +3810,7 @@ reload_as_needed (int live_known)
if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
{
eliminate_regs_in_insn (insn, 1);
if (GET_CODE (insn) == NOTE)
if (NOTE_P (insn))
{
update_eliminable_offsets ();
continue;
@@ -3901,7 +3900,7 @@ reload_as_needed (int live_known)
/* There may have been CLOBBER insns placed after INSN. So scan
between INSN and NEXT and use them to forget old reloads. */
for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
if (NONJUMP_INSN_P (x) && GET_CODE (PATTERN (x)) == CLOBBER)
note_stores (PATTERN (x), forget_old_reloads_1, NULL);
#ifdef AUTO_INC_DEC
@@ -4022,13 +4021,13 @@ reload_as_needed (int live_known)
#endif
}
/* A reload reg's contents are unknown after a label. */
if (GET_CODE (insn) == CODE_LABEL)
if (LABEL_P (insn))
CLEAR_HARD_REG_SET (reg_reloaded_valid);
/* Don't assume a reload reg is still good after a call insn
if it is a call-used reg, or if it contains a value that will
be partially clobbered by the call. */
else if (GET_CODE (insn) == CALL_INSN)
else if (CALL_P (insn))
{
AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
AND_COMPL_HARD_REG_SET (reg_reloaded_valid, reg_reloaded_call_part_clobbered);
@@ -6351,10 +6350,10 @@ emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
rl->when_needed, old, rl->out, j, 0))
{
rtx temp = PREV_INSN (insn);
while (temp && GET_CODE (temp) == NOTE)
while (temp && NOTE_P (temp))
temp = PREV_INSN (temp);
if (temp
&& GET_CODE (temp) == INSN
&& NONJUMP_INSN_P (temp)
&& GET_CODE (PATTERN (temp)) == SET
&& SET_DEST (PATTERN (temp)) == old
/* Make sure we can access insn_operand_constraint. */
@@ -6928,7 +6927,7 @@ do_output_reload (struct insn_chain *chain, struct reload *rl, int j)
return;
/* If is a JUMP_INSN, we can't support output reloads yet. */
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
abort ();
emit_output_reload_insns (chain, rld + j, j);
@@ -7647,14 +7646,14 @@ delete_output_reload (rtx insn, int j, int last_reload_reg)
for (i1 = NEXT_INSN (output_reload_insn);
i1 != insn; i1 = NEXT_INSN (i1))
{
if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
if (LABEL_P (i1) || JUMP_P (i1))
return;
if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
if ((NONJUMP_INSN_P (i1) || CALL_P (i1))
&& reg_mentioned_p (reg, PATTERN (i1)))
{
/* If this is USE in front of INSN, we only have to check that
there are no more references than accounted for by inheritance. */
while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
while (NONJUMP_INSN_P (i1) && GET_CODE (PATTERN (i1)) == USE)
{
n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
i1 = NEXT_INSN (i1);
@@ -7700,10 +7699,10 @@ delete_output_reload (rtx insn, int j, int last_reload_reg)
since if they are the only uses, they are dead. */
if (set != 0 && SET_DEST (set) == reg)
continue;
if (GET_CODE (i2) == CODE_LABEL
|| GET_CODE (i2) == JUMP_INSN)
if (LABEL_P (i2)
|| JUMP_P (i2))
break;
if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
if ((NONJUMP_INSN_P (i2) || CALL_P (i2))
&& reg_mentioned_p (reg, PATTERN (i2)))
{
/* Some other ref remains; just delete the output reload we
@@ -7725,8 +7724,8 @@ delete_output_reload (rtx insn, int j, int last_reload_reg)
delete_address_reloads (i2, insn);
delete_insn (i2);
}
if (GET_CODE (i2) == CODE_LABEL
|| GET_CODE (i2) == JUMP_INSN)
if (LABEL_P (i2)
|| JUMP_P (i2))
break;
}
@@ -7837,7 +7836,7 @@ delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
it might have been inherited. */
for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
{
if (GET_CODE (i2) == CODE_LABEL)
if (LABEL_P (i2))
break;
if (! INSN_P (i2))
continue;
@@ -7861,7 +7860,7 @@ delete_address_reloads_1 (rtx dead_insn, rtx x, rtx current_insn)
}
return;
}
if (GET_CODE (i2) == JUMP_INSN)
if (JUMP_P (i2))
break;
/* If DST is still live at CURRENT_INSN, check if it is used for
any reload. Note that even if CURRENT_INSN sets DST, we still
@@ -8059,7 +8058,7 @@ fixup_abnormal_edges (void)
== (EDGE_ABNORMAL | EDGE_EH))
break;
}
if (e && GET_CODE (BB_END (bb)) != CALL_INSN
if (e && !CALL_P (BB_END (bb))
&& !can_throw_internal (BB_END (bb)))
{
rtx insn = BB_END (bb), stop = NEXT_INSN (BB_END (bb));
@@ -8069,11 +8068,11 @@ fixup_abnormal_edges (void)
break;
/* Get past the new insns generated. Allow notes, as the insns may
be already deleted. */
while ((GET_CODE (insn) == INSN || GET_CODE (insn) == NOTE)
while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
&& !can_throw_internal (insn)
&& insn != BB_HEAD (bb))
insn = PREV_INSN (insn);
if (GET_CODE (insn) != CALL_INSN && !can_throw_internal (insn))
if (!CALL_P (insn) && !can_throw_internal (insn))
abort ();
BB_END (bb) = insn;
inserted = true;


@@ -351,8 +351,8 @@ find_end_label (void)
label and we don't have to do anything else. */
insn = get_last_insn ();
while (GET_CODE (insn) == NOTE
|| (GET_CODE (insn) == INSN
while (NOTE_P (insn)
|| (NONJUMP_INSN_P (insn)
&& (GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER)))
insn = PREV_INSN (insn);
@@ -360,8 +360,8 @@ find_end_label (void)
/* When a target threads its epilogue we might already have a
suitable return insn. If so put a label before it for the
end_of_function_label. */
if (GET_CODE (insn) == BARRIER
&& GET_CODE (PREV_INSN (insn)) == JUMP_INSN
if (BARRIER_P (insn)
&& JUMP_P (PREV_INSN (insn))
&& GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
{
rtx temp = PREV_INSN (PREV_INSN (insn));
@@ -375,7 +375,7 @@ find_end_label (void)
emit_label_after (end_of_function_label, temp);
}
else if (GET_CODE (insn) == CODE_LABEL)
else if (LABEL_P (insn))
end_of_function_label = insn;
else
{
@@ -470,7 +470,7 @@ emit_delay_sequence (rtx insn, rtx list, int length)
/* If INSN is followed by a BARRIER, delete the BARRIER since it will only
confuse further processing. Update LAST in case it was the last insn.
We will put the BARRIER back in later. */
if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
if (NEXT_INSN (insn) && BARRIER_P (NEXT_INSN (insn)))
{
delete_related_insns (NEXT_INSN (insn));
last = get_last_insn ();
@@ -534,7 +534,7 @@ emit_delay_sequence (rtx insn, rtx list, int length)
case REG_LABEL:
/* Keep the label reference count up to date. */
if (GET_CODE (XEXP (note, 0)) == CODE_LABEL)
if (LABEL_P (XEXP (note, 0)))
LABEL_NUSES (XEXP (note, 0)) ++;
break;
@@ -550,13 +550,13 @@ emit_delay_sequence (rtx insn, rtx list, int length)
last insn in that SEQUENCE to point to us. Similarly for the first
insn in the following insn if it is a SEQUENCE. */
if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
if (PREV_INSN (seq_insn) && NONJUMP_INSN_P (PREV_INSN (seq_insn))
&& GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
= seq_insn;
if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
if (NEXT_INSN (seq_insn) && NONJUMP_INSN_P (NEXT_INSN (seq_insn))
&& GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
@@ -616,7 +616,7 @@ delete_from_delay_slot (rtx insn)
seq_insn = PREV_INSN (NEXT_INSN (trial));
seq = PATTERN (seq_insn);
if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == BARRIER)
if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
had_barrier = 1;
/* Create a delay list consisting of all the insns other than the one
@@ -641,9 +641,7 @@ delete_from_delay_slot (rtx insn)
annul flag. */
if (delay_list)
trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
else if (GET_CODE (trial) == JUMP_INSN
|| GET_CODE (trial) == CALL_INSN
|| GET_CODE (trial) == INSN)
else if (INSN_P (trial))
INSN_ANNULLED_BRANCH_P (trial) = 0;
INSN_FROM_TARGET_P (insn) = 0;
@@ -686,7 +684,7 @@ delete_scheduled_jump (rtx insn)
a delay slot. It will be the last insn in the delay slot, if
it is. */
rtx trial = previous_insn (insn);
if (GET_CODE (trial) == NOTE)
if (NOTE_P (trial))
trial = prev_nonnote_insn (trial);
if (sets_cc0_p (PATTERN (trial)) != 1
|| FIND_REG_INC_NOTE (trial, NULL_RTX))
@@ -768,7 +766,7 @@ optimize_skip (rtx insn)
flags = get_jump_flags (insn, JUMP_LABEL (insn));
if (trial == 0
|| GET_CODE (trial) != INSN
|| !NONJUMP_INSN_P (trial)
|| GET_CODE (PATTERN (trial)) == SEQUENCE
|| recog_memoized (trial) < 0
|| (! eligible_for_annul_false (insn, 0, trial, flags)
@@ -785,7 +783,7 @@ optimize_skip (rtx insn)
if ((next_trial == next_active_insn (JUMP_LABEL (insn))
&& ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
|| (next_trial != 0
&& GET_CODE (next_trial) == JUMP_INSN
&& JUMP_P (next_trial)
&& JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
&& (simplejump_p (next_trial)
|| GET_CODE (PATTERN (next_trial)) == RETURN)))
@@ -807,7 +805,7 @@ optimize_skip (rtx insn)
branch, thread our jump to the target of that branch. Don't
change this into a RETURN here, because it may not accept what
we have in the delay slot. We'll fix this up later. */
if (next_trial && GET_CODE (next_trial) == JUMP_INSN
if (next_trial && JUMP_P (next_trial)
&& (simplejump_p (next_trial)
|| GET_CODE (PATTERN (next_trial)) == RETURN))
{
@@ -851,7 +849,7 @@ get_jump_flags (rtx insn, rtx label)
If LABEL is zero, then there is no way to determine the branch
direction. */
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
&& INSN_UID (insn) <= max_uid
&& label != 0
@@ -867,7 +865,7 @@ get_jump_flags (rtx insn, rtx label)
determine the branch prediction.
Non conditional branches are predicted as very likely taken. */
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn)))
{
int prediction;
@@ -911,7 +909,7 @@ rare_destination (rtx insn)
for (; insn; insn = next)
{
if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
next = NEXT_INSN (insn);
@@ -997,7 +995,7 @@ mostly_true_jump (rtx jump_insn, rtx condition)
before the next real insn, we assume the branch is to the top of
the loop. */
for (insn = PREV_INSN (target_label);
insn && GET_CODE (insn) == NOTE;
insn && NOTE_P (insn);
insn = PREV_INSN (insn))
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
return 2;
@@ -1007,7 +1005,7 @@ mostly_true_jump (rtx jump_insn, rtx condition)
before the next real insn, we assume the branch is to the loop branch
test. */
for (insn = NEXT_INSN (target_label);
insn && GET_CODE (insn) == NOTE;
insn && NOTE_P (insn);
insn = PREV_INSN (insn))
if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
return 1;
@@ -1506,7 +1504,7 @@ try_merge_delay_insns (rtx insn, rtx thread)
next_trial = next_nonnote_insn (trial);
/* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
if (GET_CODE (trial) == INSN
if (NONJUMP_INSN_P (trial)
&& (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
continue;
@@ -1554,7 +1552,7 @@ try_merge_delay_insns (rtx insn, rtx thread)
/* See if we stopped on a filled insn. If we did, try to see if its
delay slots match. */
if (slot_number != num_slots
&& trial && GET_CODE (trial) == INSN
&& trial && NONJUMP_INSN_P (trial)
&& GET_CODE (PATTERN (trial)) == SEQUENCE
&& ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
{
@@ -1679,7 +1677,7 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
trial && insns_to_search > 0;
trial = PREV_INSN (trial), --insns_to_search)
{
if (GET_CODE (trial) == CODE_LABEL)
if (LABEL_P (trial))
return 0;
if (! INSN_P (trial))
@@ -1693,7 +1691,7 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
{
/* Stop for a CALL and its delay slots because it is difficult to
track its resource needs correctly. */
if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
if (CALL_P (XVECEXP (pat, 0, 0)))
return 0;
/* Stop for an INSN or JUMP_INSN with delayed effects and its delay
@@ -1741,7 +1739,7 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
mark_referenced_resources (insn, &needed, 1);
/* If TARGET is a SEQUENCE, get the main insn. */
if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
target_main = XVECEXP (PATTERN (target), 0, 0);
if (resource_conflicts_p (&needed, &set)
@@ -1770,7 +1768,7 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
delay_list = XEXP (delay_list, 1);
}
if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
return 0;
@@ -1780,11 +1778,10 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
for (trial = PREV_INSN (target),
insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
trial && GET_CODE (trial) != CODE_LABEL && insns_to_search > 0;
trial && !LABEL_P (trial) && insns_to_search > 0;
trial = PREV_INSN (trial), --insns_to_search)
{
if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
&& GET_CODE (trial) != JUMP_INSN)
if (!INSN_P (trial))
continue;
pat = PATTERN (trial);
@@ -1795,7 +1792,7 @@ redundant_insn (rtx insn, rtx target, rtx delay_list)
{
/* If this is a CALL_INSN and its delay slots, it is hard to track
the resource needs properly, so give up. */
if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
if (CALL_P (XVECEXP (pat, 0, 0)))
return 0;
/* If this is an INSN or JUMP_INSN with delayed effects, it
@@ -1879,7 +1876,7 @@ own_thread_p (rtx thread, rtx label, int allow_fallthrough)
active_insn = next_active_insn (PREV_INSN (thread));
for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == CODE_LABEL
if (LABEL_P (insn)
&& (insn != label || LABEL_NUSES (insn) != 1))
return 0;
@@ -1888,11 +1885,11 @@ own_thread_p (rtx thread, rtx label, int allow_fallthrough)
/* Ensure that we reach a BARRIER before any insn or label. */
for (insn = prev_nonnote_insn (thread);
insn == 0 || GET_CODE (insn) != BARRIER;
insn == 0 || !BARRIER_P (insn);
insn = prev_nonnote_insn (insn))
if (insn == 0
|| GET_CODE (insn) == CODE_LABEL
|| (GET_CODE (insn) == INSN
|| LABEL_P (insn)
|| (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER))
return 0;
@@ -2061,10 +2058,10 @@ fill_simple_delay_slots (int non_jumps_p)
insn = unfilled_slots_base[i];
if (insn == 0
|| INSN_DELETED_P (insn)
|| (GET_CODE (insn) == INSN
|| (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
|| (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
|| (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
|| (JUMP_P (insn) && non_jumps_p)
|| (!JUMP_P (insn) && ! non_jumps_p))
continue;
/* It may have been that this insn used to need delay slots, but
@@ -2108,13 +2105,13 @@ fill_simple_delay_slots (int non_jumps_p)
slots_filled = 0;
delay_list = 0;
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
flags = get_jump_flags (insn, JUMP_LABEL (insn));
else
flags = get_jump_flags (insn, NULL_RTX);
if ((trial = next_active_insn (insn))
&& GET_CODE (trial) == JUMP_INSN
&& JUMP_P (trial)
&& simplejump_p (trial)
&& eligible_for_delay (insn, slots_filled, trial, flags)
&& no_labels_between_p (insn, trial)
@@ -2221,7 +2218,7 @@ fill_simple_delay_slots (int non_jumps_p)
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
if (slots_filled != slots_to_fill
&& delay_list == 0
&& GET_CODE (insn) == JUMP_INSN
&& JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn)))
{
delay_list = optimize_skip (insn);
@@ -2265,7 +2262,7 @@ fill_simple_delay_slots (int non_jumps_p)
Presumably, we should also check to see if we could get
back to this function via `setjmp'. */
&& ! can_throw_internal (insn)
&& (GET_CODE (insn) != JUMP_INSN
&& (!JUMP_P (insn)
|| ((condjump_p (insn) || condjump_in_parallel_p (insn))
&& ! simplejump_p (insn)
&& JUMP_LABEL (insn) != 0)))
@@ -2279,7 +2276,7 @@ fill_simple_delay_slots (int non_jumps_p)
CLEAR_RESOURCE (&needed);
CLEAR_RESOURCE (&set);
if (GET_CODE (insn) == CALL_INSN)
if (CALL_P (insn))
{
mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
mark_referenced_resources (insn, &needed, 1);
@@ -2289,7 +2286,7 @@ fill_simple_delay_slots (int non_jumps_p)
{
mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
mark_referenced_resources (insn, &needed, 1);
if (GET_CODE (insn) == JUMP_INSN)
if (JUMP_P (insn))
target = JUMP_LABEL (insn);
}
@@ -2298,8 +2295,8 @@ fill_simple_delay_slots (int non_jumps_p)
{
next_trial = next_nonnote_insn (trial);
if (GET_CODE (trial) == CODE_LABEL
|| GET_CODE (trial) == BARRIER)
if (LABEL_P (trial)
|| BARRIER_P (trial))
break;
/* We must have an INSN, JUMP_INSN, or CALL_INSN. */
@@ -2317,7 +2314,7 @@ fill_simple_delay_slots (int non_jumps_p)
trial_delay = trial;
/* Stop our search when seeing an unconditional jump. */
if (GET_CODE (trial_delay) == JUMP_INSN)
if (JUMP_P (trial_delay))
break;
/* See if we have a resource problem before we try to
@ -2357,8 +2354,8 @@ fill_simple_delay_slots (int non_jumps_p)
set.cc = 1;
/* If this is a call or jump, we might not get here. */
if (GET_CODE (trial_delay) == CALL_INSN
|| GET_CODE (trial_delay) == JUMP_INSN)
if (CALL_P (trial_delay)
|| JUMP_P (trial_delay))
maybe_never = 1;
}
@@ -2369,13 +2366,13 @@ fill_simple_delay_slots (int non_jumps_p)
Don't do this if the insn at the branch target is a branch. */
if (slots_to_fill != slots_filled
&& trial
&& GET_CODE (trial) == JUMP_INSN
&& JUMP_P (trial)
&& simplejump_p (trial)
&& (target == 0 || JUMP_LABEL (trial) == target)
&& (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
&& ! (GET_CODE (next_trial) == INSN
&& ! (NONJUMP_INSN_P (next_trial)
&& GET_CODE (PATTERN (next_trial)) == SEQUENCE)
&& GET_CODE (next_trial) != JUMP_INSN
&& !JUMP_P (next_trial)
&& ! insn_references_resource_p (next_trial, &set, 1)
&& ! insn_sets_resource_p (next_trial, &set, 1)
&& ! insn_sets_resource_p (next_trial, &needed, 1)
@@ -2413,7 +2410,7 @@ fill_simple_delay_slots (int non_jumps_p)
/* If this is an unconditional jump, then try to get insns from the
target of the jump. */
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& simplejump_p (insn)
&& slots_filled != slots_to_fill)
delay_list
@@ -2479,7 +2476,7 @@ fill_simple_delay_slots (int non_jumps_p)
for (trial = get_last_insn (); ! stop_search_p (trial, 1);
trial = PREV_INSN (trial))
{
if (GET_CODE (trial) == NOTE)
if (NOTE_P (trial))
continue;
pat = PATTERN (trial);
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
@@ -2607,7 +2604,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
rtx pat, old_trial;
/* If we have passed a label, we no longer own this thread. */
if (GET_CODE (trial) == CODE_LABEL)
if (LABEL_P (trial))
{
own_thread = 0;
continue;
@@ -2728,12 +2725,12 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
label lest it be deleted by delete_related_insns. */
note = find_reg_note (trial, REG_LABEL, 0);
/* REG_LABEL could be NOTE_INSN_DELETED_LABEL too. */
if (note && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
if (note && LABEL_P (XEXP (note, 0)))
LABEL_NUSES (XEXP (note, 0))++;
delete_related_insns (trial);
if (note && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
if (note && LABEL_P (XEXP (note, 0)))
LABEL_NUSES (XEXP (note, 0))--;
}
else
@@ -2800,14 +2797,14 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
a PRE_INC. We also can't do this if there's overlap of source and
destination. Overlap may happen for larger-than-register-size modes. */
if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
&& REG_P (SET_SRC (pat))
&& REG_P (SET_DEST (pat))
&& !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
{
rtx next = next_nonnote_insn (trial);
if (next && GET_CODE (next) == INSN
if (next && NONJUMP_INSN_P (next)
&& GET_CODE (PATTERN (next)) != USE
&& ! reg_set_p (SET_DEST (pat), next)
&& ! reg_set_p (SET_SRC (pat), next)
@@ -2819,9 +2816,9 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
/* If we stopped on a branch insn that has delay slots, see if we can
steal some of the insns in those slots. */
if (trial && GET_CODE (trial) == INSN
if (trial && NONJUMP_INSN_P (trial)
&& GET_CODE (PATTERN (trial)) == SEQUENCE
&& GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
&& JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
{
/* If this is the `true' thread, we will want to follow the jump,
so we can only do this if we have taken everything up to here. */
@@ -2854,7 +2851,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
arithmetic insn after the jump insn and put the arithmetic insn in the
delay slot. If we can't do this, return. */
if (delay_list == 0 && likely && new_thread
&& GET_CODE (new_thread) == INSN
&& NONJUMP_INSN_P (new_thread)
&& GET_CODE (PATTERN (new_thread)) != ASM_INPUT
&& asm_noperands (PATTERN (new_thread)) < 0)
{
@@ -2865,7 +2862,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
trial = new_thread;
pat = PATTERN (trial);
if (GET_CODE (trial) != INSN
if (!NONJUMP_INSN_P (trial)
|| GET_CODE (pat) != SET
|| ! eligible_for_delay (insn, 0, trial, flags)
|| can_throw_internal (trial))
@@ -2937,7 +2934,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
if (! thread_if_true)
abort ();
if (new_thread && GET_CODE (new_thread) == JUMP_INSN
if (new_thread && JUMP_P (new_thread)
&& (simplejump_p (new_thread)
|| GET_CODE (PATTERN (new_thread)) == RETURN)
&& redirect_with_delay_list_safe_p (insn,
@@ -2947,7 +2944,7 @@ fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
if (new_thread == 0)
label = find_end_label ();
else if (GET_CODE (new_thread) == CODE_LABEL)
else if (LABEL_P (new_thread))
label = new_thread;
else
label = get_label_before (new_thread);
@@ -2988,7 +2985,7 @@ fill_eager_delay_slots (void)
insn = unfilled_slots_base[i];
if (insn == 0
|| INSN_DELETED_P (insn)
|| GET_CODE (insn) != JUMP_INSN
|| !JUMP_P (insn)
|| ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
continue;
@@ -3113,7 +3110,7 @@ relax_delay_slots (rtx first)
/* If this is a jump insn, see if it now jumps to a jump, jumps to
the next insn, or jumps to a label that is not the last of a
group of consecutive labels. */
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
&& (target_label = JUMP_LABEL (insn)) != 0)
{
@@ -3134,7 +3131,7 @@ relax_delay_slots (rtx first)
/* See if this jump branches around an unconditional jump.
If so, invert this jump and point it to the target of the
second jump. */
if (next && GET_CODE (next) == JUMP_INSN
if (next && JUMP_P (next)
&& (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
&& target_label
&& next_active_insn (target_label) == next_active_insn (next)
@@ -3177,7 +3174,7 @@ relax_delay_slots (rtx first)
Don't do this if we expect the conditional branch to be true, because
we would then be making the more common case longer. */
if (GET_CODE (insn) == JUMP_INSN
if (JUMP_P (insn)
&& (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
&& (other = prev_active_insn (insn)) != 0
&& (condjump_p (other) || condjump_in_parallel_p (other))
@@ -3194,7 +3191,7 @@ relax_delay_slots (rtx first)
}
/* Now look only at cases where we have filled a delay slot. */
if (GET_CODE (insn) != INSN
if (!NONJUMP_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) != SEQUENCE)
continue;
@@ -3221,7 +3218,7 @@ relax_delay_slots (rtx first)
if (optimize_size
&& GET_CODE (PATTERN (delay_insn)) == RETURN
&& next
&& GET_CODE (next) == JUMP_INSN
&& JUMP_P (next)
&& GET_CODE (PATTERN (next)) == RETURN)
{
rtx after;
@@ -3255,7 +3252,7 @@ relax_delay_slots (rtx first)
}
/* Now look only at the cases where we have a filled JUMP_INSN. */
if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
if (!JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
|| ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
|| condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
continue;
@@ -3308,7 +3305,7 @@ relax_delay_slots (rtx first)
delay list and that insn is redundant, thread the jump. */
if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
&& XVECLEN (PATTERN (trial), 0) == 2
&& GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
&& JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
&& (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
|| GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
&& redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
@@ -3376,7 +3373,7 @@ relax_delay_slots (rtx first)
/* See if this is an unconditional jump around a single insn which is
identical to the one in its delay slot. In this case, we can just
delete the branch and the insn in its delay slot. */
if (next && GET_CODE (next) == INSN
if (next && NONJUMP_INSN_P (next)
&& prev_label (next_active_insn (next)) == target_label
&& simplejump_p (insn)
&& XVECLEN (pat, 0) == 2
@@ -3392,7 +3389,7 @@ relax_delay_slots (rtx first)
annulled jumps, though. Again, don't convert a jump to a RETURN
here. */
if (! INSN_ANNULLED_BRANCH_P (delay_insn)
&& next && GET_CODE (next) == JUMP_INSN
&& next && JUMP_P (next)
&& (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
&& next_active_insn (target_label) == next_active_insn (next)
&& no_labels_between_p (insn, next))
@@ -3480,7 +3477,7 @@ make_return_insns (rtx first)
made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
into a RETURN to jump to it. */
for (insn = first; insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
{
real_return_label = get_label_before (insn);
break;
@@ -3499,9 +3496,9 @@ make_return_insns (rtx first)
/* Only look at filled JUMP_INSNs that go to the end of function
label. */
- if (GET_CODE (insn) != INSN
+ if (!NONJUMP_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) != SEQUENCE
- || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
+ || !JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
|| JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
continue;
@@ -3617,7 +3614,7 @@ dbr_schedule (rtx first, FILE *file)
{
if (INSN_UID (insn) > max_uid)
max_uid = INSN_UID (insn);
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
epilogue_insn = insn;
}
@@ -3641,7 +3638,7 @@ dbr_schedule (rtx first, FILE *file)
INSN_FROM_TARGET_P (insn) = 0;
/* Skip vector tables. We can't get attributes for them. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
continue;
@@ -3650,7 +3647,7 @@ dbr_schedule (rtx first, FILE *file)
obstack_ptr_grow (&unfilled_slots_obstack, insn);
/* Ensure all jumps go to the last of a set of consecutive labels. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
&& JUMP_LABEL (insn) != 0
&& ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
@@ -3686,7 +3683,7 @@ dbr_schedule (rtx first, FILE *file)
{
next = NEXT_INSN (insn);
- if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
+ if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
&& INSN_P (XEXP (PATTERN (insn), 0)))
next = delete_related_insns (insn);
}
@@ -3743,7 +3740,7 @@ dbr_schedule (rtx first, FILE *file)
for (insn = first; insn; insn = NEXT_INSN (insn))
{
if (! INSN_DELETED_P (insn)
- && GET_CODE (insn) == INSN
+ && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER)
{
@@ -3801,14 +3798,14 @@ dbr_schedule (rtx first, FILE *file)
{
int pred_flags;
- if (GET_CODE (insn) == INSN)
+ if (NONJUMP_INSN_P (insn))
{
rtx pat = PATTERN (insn);
if (GET_CODE (pat) == SEQUENCE)
insn = XVECEXP (pat, 0, 0);
}
- if (GET_CODE (insn) != JUMP_INSN)
+ if (!JUMP_P (insn))
continue;
pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
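The hunks in this file, like the rest of the patch, replace open-coded GET_CODE comparisons with the rtl.h predicate macros. For reference, a sketch of their expansions (reconstructed here, not quoted from rtl.h; note that NONJUMP_INSN_P matches only code INSN, while INSN_P accepts any of the three insn codes):

/* Reconstructed sketch of the rtl.h predicates this patch relies on;
   the real header may spell INSN_P via the rtx class instead.  */
#define LABEL_P(X)        (GET_CODE (X) == CODE_LABEL)
#define JUMP_P(X)         (GET_CODE (X) == JUMP_INSN)
#define CALL_P(X)         (GET_CODE (X) == CALL_INSN)
#define NONJUMP_INSN_P(X) (GET_CODE (X) == INSN)
#define INSN_P(X)         (NONJUMP_INSN_P (X) || JUMP_P (X) || CALL_P (X))
#define NOTE_P(X)         (GET_CODE (X) == NOTE)
#define BARRIER_P(X)      (GET_CODE (X) == BARRIER)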


@@ -136,7 +136,7 @@ find_basic_block (rtx insn, int search_limit)
/* Scan backwards to the previous BARRIER. Then see if we can find a
label that starts a basic block. Return the basic block number. */
for (insn = prev_nonnote_insn (insn);
- insn && GET_CODE (insn) != BARRIER && search_limit != 0;
+ insn && !BARRIER_P (insn) && search_limit != 0;
insn = prev_nonnote_insn (insn), --search_limit)
;
@@ -151,7 +151,7 @@ find_basic_block (rtx insn, int search_limit)
/* See if any of the upcoming CODE_LABELs start a basic block. If we reach
anything other than a CODE_LABEL or note, we can't find this code. */
for (insn = next_nonnote_insn (insn);
- insn && GET_CODE (insn) == CODE_LABEL;
+ insn && LABEL_P (insn);
insn = next_nonnote_insn (insn))
{
FOR_EACH_BB (bb)
@@ -172,9 +172,7 @@ next_insn_no_annul (rtx insn)
{
/* If INSN is an annulled branch, skip any insns from the target
of the branch. */
- if ((GET_CODE (insn) == JUMP_INSN
-     || GET_CODE (insn) == CALL_INSN
-     || GET_CODE (insn) == INSN)
+ if (INSN_P (insn)
&& INSN_ANNULLED_BRANCH_P (insn)
&& NEXT_INSN (PREV_INSN (insn)) != insn)
{
@@ -191,7 +189,7 @@ next_insn_no_annul (rtx insn)
}
insn = NEXT_INSN (insn);
- if (insn && GET_CODE (insn) == INSN
+ if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
}
@@ -495,7 +493,7 @@ find_dead_or_set_registers (rtx target, struct resources *res,
for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
{
this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
- if (GET_CODE (this_jump_insn) == JUMP_INSN)
+ if (JUMP_P (this_jump_insn))
break;
}
}
@@ -504,7 +502,7 @@ find_dead_or_set_registers (rtx target, struct resources *res,
break;
}
- if (GET_CODE (this_jump_insn) == JUMP_INSN)
+ if (JUMP_P (this_jump_insn))
{
if (jump_count++ < 10)
{
@@ -982,11 +980,11 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
start_insn = (b == 0 ? insns : BB_HEAD (BASIC_BLOCK (b)));
stop_insn = target;
- if (GET_CODE (start_insn) == INSN
+ if (NONJUMP_INSN_P (start_insn)
&& GET_CODE (PATTERN (start_insn)) == SEQUENCE)
start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
- if (GET_CODE (stop_insn) == INSN
+ if (NONJUMP_INSN_P (stop_insn)
&& GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
stop_insn = next_insn (PREV_INSN (stop_insn));
@@ -1010,7 +1008,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
&& INSN_P (XEXP (PATTERN (insn), 0)))
real_insn = XEXP (PATTERN (insn), 0);
- if (GET_CODE (real_insn) == CALL_INSN)
+ if (CALL_P (real_insn))
{
/* CALL clobbers all call-used regs that aren't fixed except
sp, ap, and fp. Do this before setting the result of the
@@ -1030,11 +1028,11 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
parameters. But they might be early. A CALL_INSN will usually
clobber registers used for parameters. It isn't worth bothering
with the unlikely case when it won't. */
- if ((GET_CODE (real_insn) == INSN
+ if ((NONJUMP_INSN_P (real_insn)
&& GET_CODE (PATTERN (real_insn)) != USE
&& GET_CODE (PATTERN (real_insn)) != CLOBBER)
- || GET_CODE (real_insn) == JUMP_INSN
- || GET_CODE (real_insn) == CALL_INSN)
+ || JUMP_P (real_insn)
+ || CALL_P (real_insn))
{
for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_DEAD
@@ -1071,7 +1069,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
}
}
- else if (GET_CODE (real_insn) == CODE_LABEL)
+ else if (LABEL_P (real_insn))
{
/* A label clobbers the pending dead registers since neither
reload nor jump will propagate a value across a label. */
@@ -1082,7 +1080,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
/* The beginning of the epilogue corresponds to the end of the
RTL chain when there are no epilogue insns. Certain resources
are implicitly required at that point. */
- else if (GET_CODE (real_insn) == NOTE
+ else if (NOTE_P (real_insn)
&& NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
}
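mark_target_live_regs above unwraps a filled delay slot in two places before walking it. A minimal sketch of that idiom as a hypothetical helper (not part of the patch):

/* Hypothetical helper: a filled delay slot is a NONJUMP_INSN_P whose
   PATTERN is a SEQUENCE; element 0 is the branch that owns the slot.  */
static rtx
seq_head (rtx insn)
{
  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return XVECEXP (PATTERN (insn), 0, 0);
  return insn;
}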


@@ -276,12 +276,10 @@ struct rtx_def GTY((chain_next ("RTX_NEXT (&%h)"),
/* FIXME: the "NEXT_INSN (PREV_INSN (X)) == X" condition shouldn't be needed.
*/
- #define RTX_PREV(X) ((GET_CODE (X) == INSN \
-     || GET_CODE (X) == CALL_INSN \
-     || GET_CODE (X) == JUMP_INSN \
-     || GET_CODE (X) == NOTE \
-     || GET_CODE (X) == BARRIER \
-     || GET_CODE (X) == CODE_LABEL) \
+ #define RTX_PREV(X) ((INSN_P (X) \
+     || NOTE_P (X) \
+     || BARRIER_P (X) \
+     || LABEL_P (X)) \
&& PREV_INSN (X) != NULL \
&& NEXT_INSN (PREV_INSN (X)) == X \
? PREV_INSN (X) : NULL)
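The rewrite of RTX_PREV is behavior-preserving, since INSN_P covers exactly the three insn codes the old macro listed (INSN, CALL_INSN, JUMP_INSN). A hypothetical use, assuming only what the macro itself guarantees:

/* Illustration only: RTX_PREV yields NULL at the chain head and at any
   point where NEXT_INSN (PREV_INSN (X)) != X, so this walk stops at
   the first inconsistency in the doubly linked insn chain.  */
static rtx
walk_to_chain_head (rtx x)
{
  rtx prev;
  while ((prev = RTX_PREV (x)) != NULL)
    x = prev;
  return x;
}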


@@ -631,7 +631,7 @@ global_reg_mentioned_p (rtx x)
{
if (INSN_P (x))
{
- if (GET_CODE (x) == CALL_INSN)
+ if (CALL_P (x))
{
if (! CONST_OR_PURE_CALL_P (x))
return 1;
@@ -784,7 +784,7 @@ no_labels_between_p (rtx beg, rtx end)
if (beg == end)
return 0;
for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
- if (GET_CODE (p) == CODE_LABEL)
+ if (LABEL_P (p))
return 0;
return 1;
}
@@ -797,7 +797,7 @@ no_jumps_between_p (rtx beg, rtx end)
{
rtx p;
for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
- if (GET_CODE (p) == JUMP_INSN)
+ if (JUMP_P (p))
return 0;
return 1;
}
@@ -816,7 +816,7 @@ reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
if (INSN_P (insn)
&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
&& (find_reg_fusage (insn, USE, reg)
|| find_reg_fusage (insn, CLOBBER, reg)))))
return 1;
@@ -915,7 +915,7 @@ reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
if (INSN_P (insn)
&& (reg_referenced_p (reg, PATTERN (insn))
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
&& find_reg_fusage (insn, USE, reg))))
return 1;
return 0;
@@ -946,7 +946,7 @@ reg_set_p (rtx reg, rtx insn)
check if a side-effect of the insn clobbers REG. */
if (INSN_P (insn)
&& (FIND_REG_INC_NOTE (insn, reg)
- || (GET_CODE (insn) == CALL_INSN
+ || (CALL_P (insn)
/* We'd like to test call_used_regs here, but rtlanal.c can't
reference that variable due to its use in genattrtab. So
we'll just be more conservative.
@@ -1368,7 +1368,7 @@ find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
rtx p;
- for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
+ for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
p = PREV_INSN (p))
if (INSN_P (p))
{
@@ -1782,7 +1782,7 @@ dead_or_set_regno_p (rtx insn, unsigned int test_regno)
if (find_regno_note (insn, REG_DEAD, test_regno))
return 1;
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& find_regno_fusage (insn, CLOBBER, test_regno))
return 1;
@@ -1935,7 +1935,7 @@ find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
/* If it's not a CALL_INSN, it can't possibly have a
CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
- if (GET_CODE (insn) != CALL_INSN)
+ if (!CALL_P (insn))
return 0;
if (! datum)
@@ -1986,7 +1986,7 @@ find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
to pseudo registers, so don't bother checking. */
if (regno >= FIRST_PSEUDO_REGISTER
- || GET_CODE (insn) != CALL_INSN )
+ || !CALL_P (insn) )
return 0;
for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
@@ -2011,7 +2011,7 @@ pure_call_p (rtx insn)
{
rtx link;
- if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
+ if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
return 0;
/* Look for the note that differentiates const and pure functions. */
@@ -2711,7 +2711,7 @@ replace_label (rtx *x, void *data)
/* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
field. This is not handled by for_each_rtx because it doesn't
handle unprinted ('0') fields. */
- if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
+ if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
JUMP_LABEL (l) = new_label;
if ((GET_CODE (l) == LABEL_REF
@@ -2743,7 +2743,7 @@ rtx_referenced_p_1 (rtx *body, void *x)
return y == NULL_RTX;
/* Return true if a label_ref *BODY refers to label Y. */
- if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
+ if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
return XEXP (*body, 0) == y;
/* If *BODY is a reference to pool constant traverse the constant. */
@@ -2771,10 +2771,10 @@ tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
{
rtx label, table;
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (label = JUMP_LABEL (insn)) != NULL_RTX
&& (table = next_active_insn (label)) != NULL_RTX
- && GET_CODE (table) == JUMP_INSN
+ && JUMP_P (table)
&& (GET_CODE (PATTERN (table)) == ADDR_VEC
|| GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
{
@@ -2849,7 +2849,7 @@ int
computed_jump_p (rtx insn)
{
int i;
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
rtx pat = PATTERN (insn);
@@ -3103,7 +3103,7 @@ insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
while (r)
{
- if (GET_CODE (r) == NOTE)
+ if (NOTE_P (r))
{
switch (NOTE_LINE_NUMBER (r))
{
@@ -3411,14 +3411,14 @@ find_first_parameter_load (rtx call_insn, rtx boundary)
/* It is possible that some loads got CSEed from one call to
another. Stop in that case. */
- if (GET_CODE (before) == CALL_INSN)
+ if (CALL_P (before))
break;
/* Our caller needs either ensure that we will find all sets
(in case code has not been optimized yet), or take care
for possible labels in a way by setting boundary to preceding
CODE_LABEL. */
- if (GET_CODE (before) == CODE_LABEL)
+ if (LABEL_P (before))
{
if (before != boundary)
abort ();
@@ -3536,7 +3536,7 @@ can_hoist_insn_p (rtx insn, rtx val, regset live)
return false;
/* We can move CALL_INSN, but we need to check that all caller clobbered
regs are dead. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
return false;
/* In future we will handle hoisting of libcall sequences, but
give up for now. */
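The scanning loops in this file share one shape: advance via NEXT_INSN between two points and test a predicate on each insn. Modeled on no_labels_between_p and no_jumps_between_p above, a hypothetical variant for calls:

/* Hypothetical helper, not in the patch: nonzero if no CALL_INSN
   occurs strictly between BEG and END on the insn chain.  */
static int
no_calls_between_p (rtx beg, rtx end)
{
  rtx p;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (CALL_P (p))
      return 0;
  return 1;
}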


@@ -147,7 +147,7 @@ get_condition (rtx insn)
return 0;
if (GET_CODE (pat) == COND_EXEC)
return COND_EXEC_TEST (pat);
- if (GET_CODE (insn) != JUMP_INSN)
+ if (!JUMP_P (insn))
return 0;
if (GET_CODE (pat) != SET || SET_SRC (pat) != pc_rtx)
return 0;
@@ -199,7 +199,7 @@ add_dependence (rtx insn, rtx elem, enum reg_note dep_type)
/* We can get a dependency on deleted insns due to optimizations in
the register allocation and reloading or due to splitting. Any
such dependency is useless and can be ignored. */
- if (GET_CODE (elem) == NOTE)
+ if (NOTE_P (elem))
return 0;
/* flow.c doesn't handle conditional lifetimes entirely correctly;
@@ -207,7 +207,7 @@ add_dependence (rtx insn, rtx elem, enum reg_note dep_type)
/* ??? add_dependence is the wrong place to be eliding dependencies,
as that forgets that the condition expressions themselves may
be dependent. */
- if (GET_CODE (insn) != CALL_INSN && GET_CODE (elem) != CALL_INSN)
+ if (!CALL_P (insn) && !CALL_P (elem))
{
cond1 = get_condition (insn);
cond2 = get_condition (elem);
@@ -231,7 +231,7 @@ add_dependence (rtx insn, rtx elem, enum reg_note dep_type)
No need for interblock dependences with calls, since
calls are not moved between blocks. Note: the edge where
elem is a CALL is still required. */
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& (INSN_BB (elem) != INSN_BB (insn)))
return 0;
#endif
@@ -716,7 +716,7 @@ sched_analyze_2 (struct deps *deps, rtx x, rtx insn)
}
for (u = deps->last_pending_memory_flush; u; u = XEXP (u, 1))
- if (GET_CODE (XEXP (u, 0)) != JUMP_INSN
+ if (!JUMP_P (XEXP (u, 0))
|| deps_may_trap_p (x))
add_dependence (insn, XEXP (u, 0), REG_DEP_ANTI);
@@ -853,7 +853,7 @@ sched_analyze_insn (struct deps *deps, rtx x, rtx insn, rtx loop_notes)
sched_analyze_2 (deps, x, insn);
/* Mark registers CLOBBERED or used by called function. */
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
{
@@ -866,11 +866,11 @@ sched_analyze_insn (struct deps *deps, rtx x, rtx insn, rtx loop_notes)
reg_pending_barrier = MOVE_BARRIER;
}
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
rtx next;
next = next_nonnote_insn (insn);
- if (next && GET_CODE (next) == BARRIER)
+ if (next && BARRIER_P (next))
reg_pending_barrier = TRUE_BARRIER;
else
{
@@ -1179,24 +1179,24 @@ sched_analyze (struct deps *deps, rtx head, rtx tail)
/* Before reload, if the previous block ended in a call, show that
we are inside a post-call group, so as to keep the lifetimes of
hard registers correct. */
- if (! reload_completed && GET_CODE (head) != CODE_LABEL)
+ if (! reload_completed && !LABEL_P (head))
{
insn = prev_nonnote_insn (head);
- if (insn && GET_CODE (insn) == CALL_INSN)
+ if (insn && CALL_P (insn))
deps->in_post_call_group_p = post_call_initial;
}
for (insn = head;; insn = NEXT_INSN (insn))
{
rtx link, end_seq, r0, set;
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
+ if (NONJUMP_INSN_P (insn) || JUMP_P (insn))
{
/* Clear out the stale LOG_LINKS from flow. */
free_INSN_LIST_list (&LOG_LINKS (insn));
/* Make each JUMP_INSN a scheduling barrier for memory
references. */
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
/* Keep the list a reasonable size. */
if (deps->pending_flush_length++ > MAX_PENDING_LIST_LENGTH)
@@ -1208,7 +1208,7 @@ sched_analyze (struct deps *deps, rtx head, rtx tail)
sched_analyze_insn (deps, PATTERN (insn), insn, loop_notes);
loop_notes = 0;
}
- else if (GET_CODE (insn) == CALL_INSN)
+ else if (CALL_P (insn))
{
int i;
@@ -1282,7 +1282,7 @@ sched_analyze (struct deps *deps, rtx head, rtx tail)
/* See comments on reemit_notes as to why we do this.
??? Actually, the reemit_notes just say what is done, not why. */
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
|| NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
@@ -1324,7 +1324,7 @@ sched_analyze (struct deps *deps, rtx head, rtx tail)
the outermost libcall sequence. */
&& deps->libcall_block_tail_insn == 0
/* The sequence must start with a clobber of a register. */
- && GET_CODE (insn) == INSN
+ && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == CLOBBER
&& (r0 = XEXP (PATTERN (insn), 0), REG_P (r0))
&& REG_P (XEXP (PATTERN (insn), 0))
@@ -1373,7 +1373,7 @@ add_forward_dependence (rtx from, rtx to, enum reg_note dep_type)
However, if we have enabled checking we might as well go
ahead and verify that add_dependence worked properly. */
- if (GET_CODE (from) == NOTE
+ if (NOTE_P (from)
|| INSN_DELETED_P (from)
|| (forward_dependency_cache != NULL
&& bitmap_bit_p (&forward_dependency_cache[INSN_LUID (from)],
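sched_analyze_insn above decides that a jump ends the block unconditionally by peeking past notes for a barrier. That test, factored into a hypothetical helper for illustration:

/* Hypothetical helper capturing the test in sched_analyze_insn: a jump
   followed (ignoring notes) by a barrier never falls through.  */
static int
jump_never_falls_through_p (rtx insn)
{
  rtx next = next_nonnote_insn (insn);
  return JUMP_P (insn) && next != NULL_RTX && BARRIER_P (next);
}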


@@ -240,7 +240,7 @@ fix_basic_block_boundaries (basic_block bb, basic_block last, rtx head,
for (; insn != aftertail; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
abort ();
/* Create new basic blocks just before first insn. */
if (inside_basic_block_p (insn))
@@ -250,7 +250,7 @@ fix_basic_block_boundaries (basic_block bb, basic_block last, rtx head,
rtx note;
/* Re-emit the basic block note for newly found BB header. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
{
note = emit_note_after (NOTE_INSN_BASIC_BLOCK, insn);
head = insn;
@@ -314,7 +314,7 @@ fix_basic_block_boundaries (basic_block bb, basic_block last, rtx head,
delete_insn_chain (head, insn);
/* We keep some notes in the way that may split barrier from the
jump. */
- if (GET_CODE (next) == BARRIER)
+ if (BARRIER_P (next))
{
emit_barrier_after (prev_nonnote_insn (head));
delete_insn (next);
@@ -328,7 +328,7 @@ fix_basic_block_boundaries (basic_block bb, basic_block last, rtx head,
BB_END (curr_bb) = insn;
add_missing_bbs (BB_HEAD (curr_bb), bb, curr_bb->prev_bb);
}
- note = GET_CODE (head) == CODE_LABEL ? NEXT_INSN (head) : head;
+ note = LABEL_P (head) ? NEXT_INSN (head) : head;
NOTE_BASIC_BLOCK (note) = curr_bb;
update_bb_for_insn (curr_bb);
bb = curr_bb->next_bb;
@@ -420,7 +420,7 @@ add_deps_for_risky_insns (rtx head, rtx tail)
basic_block last_block = NULL, bb;
for (insn = head; insn != next_tail; insn = NEXT_INSN (insn))
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
bb = BLOCK_FOR_INSN (insn);
bb->aux = last_block;
@@ -592,7 +592,7 @@ schedule_ebbs (FILE *dump_file)
edge e;
tail = BB_END (bb);
if (bb->next_bb == EXIT_BLOCK_PTR
- || GET_CODE (BB_HEAD (bb->next_bb)) == CODE_LABEL)
+ || LABEL_P (BB_HEAD (bb->next_bb)))
break;
for (e = bb->succ; e; e = e->succ_next)
if ((e->flags & EDGE_FALLTHRU) != 0)
@@ -608,11 +608,11 @@ schedule_ebbs (FILE *dump_file)
a note or two. */
while (head != tail)
{
- if (GET_CODE (head) == NOTE)
+ if (NOTE_P (head))
head = NEXT_INSN (head);
- else if (GET_CODE (tail) == NOTE)
+ else if (NOTE_P (tail))
tail = PREV_INSN (tail);
- else if (GET_CODE (head) == CODE_LABEL)
+ else if (LABEL_P (head))
head = NEXT_INSN (head);
else
break;
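The loop just above trims notes and leading labels off the region before scheduling it. Restated as a hypothetical helper, under the same invariant that head and tail bound a nonempty region:

/* Hypothetical refactoring of the trimming loop in schedule_ebbs;
   the trim order differs slightly but the end state is the same.  */
static void
trim_schedule_region (rtx *headp, rtx *tailp)
{
  while (*headp != *tailp)
    {
      if (NOTE_P (*headp) || LABEL_P (*headp))
	*headp = NEXT_INSN (*headp);
      else if (NOTE_P (*tailp))
	*tailp = PREV_INSN (*tailp);
      else
	break;
    }
}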


@@ -349,7 +349,7 @@ is_cfg_nonregular (void)
rtx note = find_reg_note (insn, REG_LABEL, NULL_RTX);
if (note
- && ! (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
+ && ! (JUMP_P (NEXT_INSN (insn))
&& find_reg_note (NEXT_INSN (insn), REG_LABEL,
XEXP (note, 0))))
return 1;
@@ -1500,7 +1500,7 @@ find_conditional_protection (rtx insn, int load_insn_bb)
&& IS_REACHABLE (INSN_BB (next), load_insn_bb)
&& load_insn_bb != INSN_BB (next)
&& GET_MODE (link) == VOIDmode
- && (GET_CODE (next) == JUMP_INSN
+ && (JUMP_P (next)
|| find_conditional_protection (next, load_insn_bb)))
return 1;
}
@@ -1532,7 +1532,7 @@ is_conditionally_protected (rtx load_insn, int bb_src, int bb_trg)
/* Must be a DEF-USE dependence upon non-branch. */
if (GET_MODE (link) != VOIDmode
- || GET_CODE (insn1) == JUMP_INSN)
+ || JUMP_P (insn1))
continue;
/* Must exist a path: region-entry -> ... -> bb_trg -> ... load_insn. */
@@ -1812,7 +1812,7 @@ init_ready_list (struct ready_list *ready)
static int
can_schedule_ready_p (rtx insn)
{
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
last_was_jump = 1;
/* An interblock motion? */
@@ -2042,9 +2042,9 @@ add_branch_dependences (rtx head, rtx tail)
insn = tail;
last = 0;
- while (GET_CODE (insn) == CALL_INSN
-     || GET_CODE (insn) == JUMP_INSN
-     || (GET_CODE (insn) == INSN
+ while (CALL_P (insn)
+     || JUMP_P (insn)
+     || (NONJUMP_INSN_P (insn)
&& (GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER
|| can_throw_internal (insn)
@@ -2053,9 +2053,9 @@ add_branch_dependences (rtx head, rtx tail)
#endif
|| (!reload_completed
&& sets_likely_spilled (PATTERN (insn)))))
- || GET_CODE (insn) == NOTE)
+ || NOTE_P (insn))
{
- if (GET_CODE (insn) != NOTE)
+ if (!NOTE_P (insn))
{
if (last != 0 && !find_insn_list (insn, LOG_LINKS (last)))
{
@@ -2315,7 +2315,7 @@ debug_dependencies (void)
{
int n;
fprintf (sched_dump, ";; %6d ", INSN_UID (insn));
- if (GET_CODE (insn) == NOTE)
+ if (NOTE_P (insn))
{
n = NOTE_LINE_NUMBER (insn);
if (n < 0)


@@ -362,8 +362,8 @@ emit_nop (void)
last_insn = get_last_insn ();
if (!optimize
- && (GET_CODE (last_insn) == CODE_LABEL
- || (GET_CODE (last_insn) == NOTE
+ && (LABEL_P (last_insn)
+ || (NOTE_P (last_insn)
&& prev_real_insn (last_insn) == 0)))
emit_insn (gen_nop ());
}
@@ -2817,7 +2817,7 @@ expand_start_case (int exit_flag, tree expr, tree type,
/* Make sure case_stmt.start points to something that won't
need any transformation before expand_end_case. */
- if (GET_CODE (get_last_insn ()) != NOTE)
+ if (!NOTE_P (get_last_insn ()))
emit_note (NOTE_INSN_DELETED);
thiscase->data.case_stmt.start = get_last_insn ();
@@ -4104,7 +4104,7 @@ node_is_bounded (case_node_ptr node, tree index_type)
static void
emit_jump_if_reachable (rtx label)
{
- if (GET_CODE (get_last_insn ()) != BARRIER)
+ if (!BARRIER_P (get_last_insn ()))
emit_jump (label);
}


@@ -310,13 +310,13 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
/* If the last instruction is not a BARRIER or a JUMP_INSN, then
don't do anything. */
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
{
/* Delete the jump insn. This will delete the barrier also. */
last_loop_insn = PREV_INSN (last_loop_insn);
}
- if (ujump && GET_CODE (last_loop_insn) == JUMP_INSN)
+ if (ujump && JUMP_P (last_loop_insn))
{
#ifdef HAVE_cc0
rtx prev = PREV_INSN (last_loop_insn);
@@ -441,9 +441,9 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
Just return without unrolling the loop in such cases. */
insn = loop_start;
- while (GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != JUMP_INSN)
+ while (!LABEL_P (insn) && !JUMP_P (insn))
insn = NEXT_INSN (insn);
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
return;
}
@@ -464,9 +464,9 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
insert_before = NEXT_INSN (last_loop_insn);
/* Set copy_end to the insn before the jump at the end of the loop. */
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
copy_end = PREV_INSN (PREV_INSN (last_loop_insn));
- else if (GET_CODE (last_loop_insn) == JUMP_INSN)
+ else if (JUMP_P (last_loop_insn))
{
copy_end = PREV_INSN (last_loop_insn);
#ifdef HAVE_cc0
@@ -500,12 +500,12 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
/* Set insert_before to the jump insn at the end of the loop.
Set copy_end to before the jump insn at the end of the loop. */
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
{
insert_before = PREV_INSN (last_loop_insn);
copy_end = PREV_INSN (insert_before);
}
- else if (GET_CODE (last_loop_insn) == JUMP_INSN)
+ else if (JUMP_P (last_loop_insn))
{
insert_before = last_loop_insn;
#ifdef HAVE_cc0
@@ -533,7 +533,7 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
/* Normal case: Must copy the compare and branch instructions at the
end of the loop. */
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
{
/* Loop ends with an unconditional jump and a barrier.
Handle this like above, don't copy jump and barrier.
@@ -546,7 +546,7 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
insert_before = PREV_INSN (last_loop_insn);
copy_end = PREV_INSN (insert_before);
}
else if (GET_CODE (last_loop_insn) == JUMP_INSN)
else if (JUMP_P (last_loop_insn))
{
/* Set insert_before to immediately after the JUMP_INSN, so that
NOTEs at the end of the loop will be correctly handled by
@@ -576,10 +576,10 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
exit_label = gen_label_rtx ();
insn = loop_start;
- while (GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != JUMP_INSN)
+ while (!LABEL_P (insn) && !JUMP_P (insn))
insn = NEXT_INSN (insn);
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
/* The loop starts with a jump down to the exit condition test.
Start copying the loop after the barrier following this
@@ -603,9 +603,9 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
/* This should always be the first label in the loop. */
start_label = NEXT_INSN (copy_start);
/* There may be a line number note and/or a loop continue note here. */
- while (GET_CODE (start_label) == NOTE)
+ while (NOTE_P (start_label))
start_label = NEXT_INSN (start_label);
- if (GET_CODE (start_label) != CODE_LABEL)
+ if (!LABEL_P (start_label))
{
/* This can happen as a result of jump threading. If the first insns in
the loop test the same condition as the loop's backward jump, or the
@@ -633,8 +633,8 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
}
if (unroll_type == UNROLL_NAIVE
- && GET_CODE (last_loop_insn) == BARRIER
- && GET_CODE (PREV_INSN (last_loop_insn)) == JUMP_INSN
+ && BARRIER_P (last_loop_insn)
+ && JUMP_P (PREV_INSN (last_loop_insn))
&& start_label != JUMP_LABEL (PREV_INSN (last_loop_insn)))
{
/* In this case, we must copy the jump and barrier, because they will
@@ -645,7 +645,7 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
}
if (unroll_type == UNROLL_NAIVE
- && GET_CODE (last_loop_insn) == JUMP_INSN
+ && JUMP_P (last_loop_insn)
&& start_label != JUMP_LABEL (last_loop_insn))
{
/* ??? The loop ends with a conditional branch that does not branch back
@@ -692,9 +692,9 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
{
rtx note;
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
local_label[CODE_LABEL_NUMBER (insn)] = 1;
- else if (GET_CODE (insn) == JUMP_INSN)
+ else if (JUMP_P (insn))
{
if (JUMP_LABEL (insn))
set_label_in_map (map,
@@ -758,13 +758,13 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
/* If a register is used in the jump insn, we must not duplicate it
since it will also be used outside the loop. */
- if (GET_CODE (copy_end) == JUMP_INSN)
+ if (JUMP_P (copy_end))
copy_end_luid--;
/* If we have a target that uses cc0, then we also must not duplicate
the insn that sets cc0 before the jump insn, if one is present. */
#ifdef HAVE_cc0
- if (GET_CODE (copy_end) == JUMP_INSN
+ if (JUMP_P (copy_end)
&& sets_cc0_p (PREV_INSN (copy_end)))
copy_end_luid--;
#endif
@@ -1029,9 +1029,9 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
and then reset it inside the loop when get to the last
copy. */
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
copy_end = PREV_INSN (PREV_INSN (last_loop_insn));
- else if (GET_CODE (last_loop_insn) == JUMP_INSN)
+ else if (JUMP_P (last_loop_insn))
{
copy_end = PREV_INSN (last_loop_insn);
#ifdef HAVE_cc0
@@ -1073,7 +1073,7 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
if (i == unroll_number - 1)
{
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
copy_end = PREV_INSN (PREV_INSN (last_loop_insn));
else
copy_end = last_loop_insn;
@@ -1087,7 +1087,7 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
}
emit_label_after (labels[0], PREV_INSN (loop_start));
- if (GET_CODE (last_loop_insn) == BARRIER)
+ if (BARRIER_P (last_loop_insn))
{
insert_before = PREV_INSN (last_loop_insn);
copy_end = PREV_INSN (insert_before);
@@ -1191,7 +1191,7 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
if (unroll_type == UNROLL_MODULO)
{
insn = NEXT_INSN (copy_end);
- if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
+ if (NONJUMP_INSN_P (insn) || JUMP_P (insn))
PATTERN (insn) = remap_split_bivs (loop, PATTERN (insn));
}
@@ -1270,8 +1270,8 @@ unroll_loop (struct loop *loop, int insn_count, int strength_reduce_p)
associated LABEL_DECL to point to one of the new label instances. */
/* ??? Likewise, we can't delete a NOTE_INSN_DELETED_LABEL note. */
if (insn != start_label
- && ! (GET_CODE (insn) == CODE_LABEL && LABEL_NAME (insn))
- && ! (GET_CODE (insn) == NOTE
+ && ! (LABEL_P (insn) && LABEL_NAME (insn))
+ && ! (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL))
insn = delete_related_insns (insn);
else
@@ -2125,7 +2125,7 @@ copy_loop_body (struct loop *loop, rtx copy_start, rtx copy_end,
}
}
- if (label && GET_CODE (label) == CODE_LABEL)
+ if (label && LABEL_P (label))
JUMP_LABEL (copy) = label;
else
{
@@ -2252,8 +2252,7 @@ copy_loop_body (struct loop *loop, rtx copy_start, rtx copy_end,
do
{
insn = NEXT_INSN (insn);
- if ((GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
-     || GET_CODE (insn) == CALL_INSN)
+ if (INSN_P (insn)
&& map->insn_map[INSN_UID (insn)])
final_reg_note_copy (&REG_NOTES (map->insn_map[INSN_UID (insn)]), map);
}
@@ -2279,7 +2278,7 @@ copy_loop_body (struct loop *loop, rtx copy_start, rtx copy_end,
instructions before the last insn in the loop, COPY_NOTES_FROM
can be a NOTE_INSN_LOOP_CONT note if there is no VTOP note,
as in a do .. while loop. */
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& ((NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK
&& NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_VTOP
@@ -2330,7 +2329,7 @@ back_branch_in_range_p (const struct loop *loop, rtx insn)
/* Stop before we get to the backward branch at the end of the loop. */
loop_end = prev_nonnote_insn (loop_end);
- if (GET_CODE (loop_end) == BARRIER)
+ if (BARRIER_P (loop_end))
loop_end = PREV_INSN (loop_end);
/* Check in case insn has been deleted, search forward for first non
@@ -2346,7 +2345,7 @@ back_branch_in_range_p (const struct loop *loop, rtx insn)
for (p = NEXT_INSN (insn); p != loop_end; p = NEXT_INSN (p))
{
- if (GET_CODE (p) == JUMP_INSN)
+ if (JUMP_P (p))
{
target_insn = JUMP_LABEL (p);
@@ -2912,7 +2911,7 @@ reg_dead_after_loop (const struct loop *loop, rtx reg)
if (set && rtx_equal_p (SET_DEST (set), reg))
break;
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
{
if (GET_CODE (PATTERN (insn)) == RETURN)
break;
@@ -3145,7 +3144,7 @@ loop_find_equiv_value (const struct loop *loop, rtx reg)
ret = reg;
for (insn = PREV_INSN (loop_start); insn; insn = PREV_INSN (insn))
{
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
break;
else if (INSN_P (insn) && reg_set_p (reg, insn))
@@ -3282,7 +3281,7 @@ loop_iterations (struct loop *loop)
/* ??? We should probably try harder to find the jump insn
at the end of the loop. The following code assumes that
the last loop insn is a jump to the top of the loop. */
- if (GET_CODE (last_loop_insn) != JUMP_INSN)
+ if (!JUMP_P (last_loop_insn))
{
if (loop_dump_stream)
fprintf (loop_dump_stream,
@@ -3308,7 +3307,7 @@ loop_iterations (struct loop *loop)
do
{
- if (GET_CODE (temp) == JUMP_INSN)
+ if (JUMP_P (temp))
{
/* There are some kinds of jumps we can't deal with easily. */
if (JUMP_LABEL (temp) == 0)
@@ -3967,7 +3966,7 @@ set_dominates_use (int regno, int first_uid, int last_uid, rtx copy_start,
while (INSN_UID (p) != first_uid)
{
- if (GET_CODE (p) == JUMP_INSN)
+ if (JUMP_P (p))
passed_jump = 1;
/* Could not find FIRST_UID. */
if (p == copy_end)
@@ -3987,7 +3986,7 @@ set_dominates_use (int regno, int first_uid, int last_uid, rtx copy_start,
{
/* If we see a CODE_LABEL between FIRST_UID and LAST_UID, then we
can not be sure that FIRST_UID dominates LAST_UID. */
- if (GET_CODE (p) == CODE_LABEL)
+ if (LABEL_P (p))
return 0;
/* Could not find LAST_UID, but we reached the end of the loop, so
it must be safe. */
@@ -4024,7 +4023,7 @@ ujump_to_loop_cont (rtx loop_start, rtx loop_cont)
/* Examine insn after loop continuation note. Return if not a label. */
label = next_nonnote_insn (loop_cont);
- if (label == 0 || GET_CODE (label) != CODE_LABEL)
+ if (label == 0 || !LABEL_P (label))
return NULL_RTX;
/* Return the loop start if the branch label matches the code label. */
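ujump_to_loop_cont above checks that the insn after the loop-continuation note is a label before comparing it against the jump target. The same guard as a hypothetical helper:

/* Hypothetical helper: the first nonnote insn after INSN, but only if
   it is a CODE_LABEL; otherwise NULL_RTX.  */
static rtx
following_label (rtx insn)
{
  rtx label = next_nonnote_insn (insn);
  return (label != NULL_RTX && LABEL_P (label)) ? label : NULL_RTX;
}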


@@ -503,7 +503,7 @@ prologue_stack_adjust (void)
end = NEXT_INSN (BB_END (bb));
for (insn = BB_HEAD (bb); insn != end; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == NOTE
+ if (NOTE_P (insn)
&& NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
break;
@@ -2579,7 +2579,7 @@ vt_initialize (void)
}
note_uses (&PATTERN (insn), count_uses_1, insn);
note_stores (PATTERN (insn), count_stores, insn);
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
VTI (bb)->n_mos++;
}
}
@@ -2629,7 +2629,7 @@ vt_initialize (void)
}
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++;

View File

@ -3018,7 +3018,7 @@ output_constant_pool_1 (struct constant_descriptor_rtx *desc)
case LABEL_REF:
tmp = XEXP (x, 0);
if (INSN_DELETED_P (tmp)
- || (GET_CODE (tmp) == NOTE
+ || (NOTE_P (tmp)
&& NOTE_LINE_NUMBER (tmp) == NOTE_INSN_DELETED))
{
abort ();