Work towards NEXT_INSN/PREV_INSN requiring insns as their params

gcc/
2014-08-28  David Malcolm  <dmalcolm@redhat.com>

	* cfgexpand.c (pass_expand::execute): Strengthen local "after"
	from rtx to rtx_insn *.
	* cfgrtl.c (force_nonfallthru_and_redirect): Replace use of local
	rtx "note" with new local rtx_insn * "new_head" when calculating
	head insn of new basic block.
	* combine.c (combine_split_insns): Strengthen return type and local
	"ret" from rtx to rtx_insn *.
	(likely_spilled_retval_p): Likewise for locals "use" and "p".
	(try_combine): Eliminate local "m_split", splitting into new
	locals "m_split_insn" and "m_split_pat".
	(find_split_point): Strengthen local "seq" from rtx to
	rtx_insn *.
	* config/spu/spu.c (spu_machine_dependent_reorg): Likewise for
	locals "label", "branch".
	* config/spu/spu.md (define_expand "smulsi3_highpart"): Likewise
	for local "insn".
	(define_expand "umulsi3_highpart"): Likewise for local "insn".
	* dse.c (note_add_store_info): Likewise for fields "first",
	"current".
	(note_add_store): Likewise for local "insn".
	(emit_inc_dec_insn_before): Likewise for locals "insn",
	"new_insn", "cur".
	(find_shift_sequence): Likewise for locals "shift_seq", "insn".
	(replace_read): Likewise for locals "insns", "this_insn".
	* dwarf2cfi.c (dw_trace_info): Likewise for field "eh_head".
	(notice_eh_throw): Likewise for param "insn".
	(before_next_cfi_note): Likewise for return type, param, and local
	"prev".
	(connect_traces): Likewise for local "note".
	* emit-rtl.c (reset_all_used_flags): Likewise for local "p".
	(verify_rtl_sharing): Likewise.
	(unshare_all_rtl_in_chain): Likewise for param "insn".
	(get_first_nonnote_insn): Likewise for local "insn".
	(get_last_nonnote_insn): Likewise.  Introduce local rtx_sequence *
	"seq" and use its methods to clarify things.
	(next_insn): Strengthen return type from rtx to rtx_insn *.
	Rename param "insn" to "uncast_insn" and reintroduce "insn" as a
	local rtx_insn * using a checked cast, dropping a checked cast
	made redundant by this change.  Use a cast to and method of
	rtx_sequence to clarify the code.
	(previous_insn): Rename param "insn" to "uncast_insn" and
	reintroduce "insn" as a local rtx_insn * using a checked cast,
	dropping a checked cast made redundant by this change.  Use a cast
	to and method of rtx_sequence to clarify the code.
	(next_nonnote_insn): Rename param "insn" to "uncast_insn" and
	reintroduce "insn" as a local rtx_insn * using a checked cast,
	dropping a checked cast made redundant by this change.
	(next_nonnote_insn_bb): Likewise.
	(prev_nonnote_insn): Likewise.
	(prev_nonnote_insn_bb): Likewise.
	(next_nondebug_insn): Likewise.
	(prev_nondebug_insn): Likewise.
	(next_nonnote_nondebug_insn): Likewise.
	(prev_nonnote_nondebug_insn): Likewise.
	(next_real_insn): Likewise.
	(prev_real_insn): Likewise.
	(next_active_insn): Likewise.
	(prev_active_insn): Likewise.
	(next_cc0_user): Likewise.  Use rtx_sequence and a method for
	clarity.
	(prev_cc0_setter): Likewise.
	(try_split): Rename param "trial" to "uncast_trial" and
	reintroduce "insn" as a local rtx_insn * using a checked cast,
	dropping checked casts made redundant by this change.
	Strengthen locals "seq", "tem", "insn_last", "insn", "next" from
	rtx to rtx_insn *.
	(remove_insn): Rename param "insn" to "uncast_insn" and
	reintroduce "insn" as a local rtx_insn * using a checked cast.
	(emit_pattern_after_setloc): Likewise for param "after", as
	"uncast_after".
	(emit_pattern_after): Likewise.  Strengthen local "prev" from
	rtx to rtx_insn *.
	(emit_pattern_before_setloc): Rename param "before" to
	"uncast_before" and reintroduce "before" as a local rtx_insn *
	using a checked cast.  Strengthen locals "first", "last" from
	rtx to rtx_insn *.
	(emit_pattern_before): Likewise rename/cast param "before" to
	"uncast_before". Strengthen local "next" from rtx to rtx_insn *.
	* except.c (copy_reg_eh_region_note_forward): Strengthen param
	"first" and local "insn" from rtx to rtx_insn *.
	(copy_reg_eh_region_note_backward): Likewise for param "last"
	and local "insn".
	* expr.c (fixup_args_size_notes): Rename param "last" to
	"uncast_last" and reintroduce "last" as a local rtx_insn *
	using a checked cast.  Strengthen local "insn" from rtx to
	rtx_insn *.
	* function.c (set_insn_locations): Strengthen param "insn" from
	rtx to rtx_insn *.
	(record_insns): Likewise for param "insns" and local "tmp".
	(active_insn_between): Rename param "tail" to
	"uncast_tail" and reintroduce "tail" as a local rtx_insn *
	using a checked cast.
	(thread_prologue_and_epilogue_insns): Split out top-level local
	rtx "seq" into three different rtx_insn * locals.  Strengthen
	local "prologue_seq" from rtx to rtx_insn *.
	* gcse.c (insert_insn_end_basic_block): Strengthen local "insn"
	from rtx to rtx_insn *.
	* haifa-sched.c (initiate_bb_reg_pressure_info): Likewise.
	(priority): Likewise for locals "prev_first", "twin".
	(setup_insn_max_reg_pressure): Likewise for param "after".
	(sched_setup_bb_reg_pressure_info): Likewise.
	(no_real_insns_p): Strengthen params from const_rtx to
	const rtx_insn *.
	(schedule_block): Strengthen local "next_tail" from rtx to
	rtx_insn *.
	* ifcvt.c (find_active_insn_before): Strengthen return type and
	param "insn" from rtx to rtx_insn *.
	(find_active_insn_after): Likewise.
	(cond_exec_process_insns): Likewise for param "start" and local "insn".
	(cond_exec_process_if_block): Likewise for locals "then_start",
	"then_end", "else_start", "else_end", "insn", "start", "end", "from".
	(noce_process_if_block): Likewise for local "jump".
	(merge_if_block): Likewise for two locals named "end".
	(cond_exec_find_if_block): Likewise for local "last_insn".
	* jump.c (delete_related_insns): Rename param "insn" to
	"uncast_insn" and reintroduce "insn" as a local rtx_insn * using a
	checked cast.  Strengthen local "p" from rtx to rtx_insn *.
	* lra-constraints.c (inherit_reload_reg): Replace NULL_RTX with
	NULL.
	(split_reg): Likewise.
	* lra.c (lra_process_new_insns): Likewise.
	* modulo-sched.c (permute_partial_schedule): Strengthen param
	"last" from rtx to rtx_insn *.
	* optabs.c (add_equal_note): Likewise for param "insns" and local
	"last_insn".
	(expand_binop_directly): Add checked casts to rtx_insn * within
	NEXT_INSN (pat) uses.
	(expand_unop_direct): Likewise.
	(maybe_emit_unop_insn): Likewise.
	* recog.c (peep2_attempt): Strengthen locals "last",
	"before_try", "x" from rtx to rtx_insn *.
	* reorg.c (optimize_skip): Strengthen return type and local
	"delay_list" from rtx to rtx_insn_list *.  Strengthen param "insn"
	and locals "trial", "next_trial" from rtx to rtx_insn *.
	* resource.c (next_insn_no_annul): Strengthen return type and
	param "insn" from rtx to rtx_insn *.  Use a cast to and method of
	rtx_sequence to clarify the code.
	(mark_referenced_resources): Add a checked cast to rtx_insn *
	within PREV_INSN (x).
	(find_dead_or_set_registers): Strengthen return type, param
	"target", locals "insn", "next", "jump_insn", "this_jump_insn"
	from rtx to rtx_insn *.  Strengthen param "jump_target" from rtx *
	to rtx_insn **.
	(mark_target_live_regs): Strengthen params "insns" and "target",
	locals "insn", "jump_target", "start_insn", "stop_insn" from rtx
	to rtx_insn *.  Use cast to and method of rtx_sequence to clarify
	the code.
	* resource.h (mark_target_live_regs): Strengthen params 1 and 2
	from rtx to rtx_insn *.
	* rtl.h (copy_reg_eh_region_note_forward): Strengthen second param
	from rtx to rtx_insn *.
	(copy_reg_eh_region_note_backward): Likewise.
	(unshare_all_rtl_in_chain): Likewise for sole param.
	(dump_rtl_slim): Strengthen second and third params from const_rtx
	to const rtx_insn *.
	* sched-deps.c (sched_free_deps): Strengthen params "head" and
	"tail" and locals "insn", "next_tail" from rtx to rtx_insn *.
	* sched-ebb.c (init_ready_list): Strengthen locals "prev_head",
	"next_tail" from rtx to rtx_insn *.
	(begin_move_insn): Likewise for local "next".
	* sched-int.h (sched_free_deps): Likewise for first and second
	params.
	(no_real_insns_p): Strengthen both params from const_rtx to
	const rtx_insn *.
	(sched_setup_bb_reg_pressure_info): Strengthen second param from
	rtx to rtx_insn *.
	* sched-rgn.c (init_ready_list): Likewise for locals "prev_head",
	"next_tail".
	* sched-vis.c (dump_rtl_slim): Strengthen params "first", "last"
	and locals "insn", "tail" from const_rtx to const rtx_insn *.
	(rtl_dump_bb_for_graph): Strengthen local "insn" from rtx to
	rtx_insn *.
	(debug_rtl_slim): Strengthen params "first" and "last" from
	const_rtx to const rtx_insn *.
	* shrink-wrap.c (try_shrink_wrapping): Strengthen param
	"prologue_seq" and locals "seq", "p_insn" from rtx to rtx_insn *.
	(convert_to_simple_return): Likewise for param "returnjump".
	* shrink-wrap.h (try_shrink_wrapping): Likewise for param
	"prologue_seq".
	(convert_to_simple_return): Likewise for param "returnjump".
	* valtrack.c (propagate_for_debug): Likewise for params
	"insn", "last".
	* valtrack.h (propagate_for_debug): Likewise for second param.

From-SVN: r214693
David Malcolm 2014-08-28 20:45:40 +00:00, committed by David Malcolm
commit dc01c3d194, parent 8ba24b7b5f
34 changed files with 490 additions and 249 deletions
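
To make the recurring ChangeLog phrasing concrete, the next_insn () hunk in the emit-rtl.c diff below shows both idioms at once: the rtx parameter is renamed to "uncast_insn" and "insn" is reintroduced as a local rtx_insn * via a checked cast, and the SEQUENCE body is reached through rtx_sequence's insn () accessor rather than a raw XVECEXP.  The following is a condensed sketch of the resulting function, included for orientation rather than as a verbatim excerpt of the patch:

/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx uncast_insn)
{
  /* The parameter still arrives as a plain rtx for now; convert it to
     the stronger type up front.  safe_as_a accepts NULL and, in checked
     builds, asserts that anything non-NULL really is an insn.  */
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	/* Use rtx_sequence's insn () accessor instead of
	   XVECEXP (PATTERN (insn), 0, 0).  */
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  /* With "insn" already an rtx_insn *, the safe_as_a cast that used to
     wrap the return value is redundant and is dropped.  */
  return insn;
}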

gcc/cfgexpand.c

@ -5847,7 +5847,7 @@ pass_expand::execute (function *fun)
if (var_ret_seq)
{
rtx after = return_label;
rtx_insn *after = return_label;
rtx_insn *next = NEXT_INSN (after);
if (next && NOTE_INSN_BASIC_BLOCK_P (next))
after = next;

gcc/cfgrtl.c

@ -1603,6 +1603,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
{
rtx_insn *new_head;
gcov_type count = e->count;
int probability = e->probability;
/* Create the new structures. */
@ -1612,12 +1613,12 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
forward from the last instruction of the old block. */
rtx_jump_table_data *table;
if (tablejump_p (BB_END (e->src), NULL, &table))
note = table;
new_head = table;
else
note = BB_END (e->src);
note = NEXT_INSN (note);
new_head = BB_END (e->src);
new_head = NEXT_INSN (new_head);
jump_block = create_basic_block (note, NULL, e->src);
jump_block = create_basic_block (new_head, NULL, e->src);
jump_block->count = count;
jump_block->frequency = EDGE_FREQUENCY (e);

gcc/combine.c

@ -514,13 +514,13 @@ target_canonicalize_comparison (enum rtx_code *code, rtx *op0, rtx *op1,
reg_stat vector is made larger if the splitter creates a new
register. */
static rtx
static rtx_insn *
combine_split_insns (rtx pattern, rtx insn)
{
rtx ret;
rtx_insn *ret;
unsigned int nregs;
ret = split_insns (pattern, insn);
ret = safe_as_a <rtx_insn *> (split_insns (pattern, insn));
nregs = max_reg_num ();
if (nregs > reg_stat.length ())
reg_stat.safe_grow_cleared (nregs);
@ -2294,8 +2294,9 @@ likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
static int
likely_spilled_retval_p (rtx_insn *insn)
{
rtx use = BB_END (this_basic_block);
rtx reg, p;
rtx_insn *use = BB_END (this_basic_block);
rtx reg;
rtx_insn *p;
unsigned regno, nregs;
/* We assume here that no machine mode needs more than
32 hard registers when the value overlaps with a register
@ -3333,13 +3334,14 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
&& asm_noperands (newpat) < 0)
{
rtx parallel, m_split, *split;
rtx parallel, *split;
rtx_insn *m_split_insn;
/* See if the MD file can split NEWPAT. If it can't, see if letting it
use I2DEST as a scratch register will help. In the latter case,
convert I2DEST to the mode of the source of NEWPAT if we can. */
m_split = combine_split_insns (newpat, i3);
m_split_insn = combine_split_insns (newpat, i3);
/* We can only use I2DEST as a scratch reg if it doesn't overlap any
inputs of NEWPAT. */
@ -3348,7 +3350,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
possible to try that as a scratch reg. This would require adding
more code to make it work though. */
if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
if (m_split_insn == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
{
enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
@ -3358,11 +3360,11 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
gen_rtvec (2, newpat,
gen_rtx_CLOBBER (VOIDmode,
i2dest)));
m_split = combine_split_insns (parallel, i3);
m_split_insn = combine_split_insns (parallel, i3);
/* If that didn't work, try changing the mode of I2DEST if
we can. */
if (m_split == 0
if (m_split_insn == 0
&& new_mode != GET_MODE (i2dest)
&& new_mode != VOIDmode
&& can_change_dest_mode (i2dest, added_sets_2, new_mode))
@ -3383,9 +3385,9 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
gen_rtvec (2, newpat,
gen_rtx_CLOBBER (VOIDmode,
ni2dest))));
m_split = combine_split_insns (parallel, i3);
m_split_insn = combine_split_insns (parallel, i3);
if (m_split == 0
if (m_split_insn == 0
&& REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
{
struct undo *buf;
@ -3398,34 +3400,34 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
}
}
i2scratch = m_split != 0;
i2scratch = m_split_insn != 0;
}
/* If recog_for_combine has discarded clobbers, try to use them
again for the split. */
if (m_split == 0 && newpat_vec_with_clobbers)
if (m_split_insn == 0 && newpat_vec_with_clobbers)
{
parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
m_split = combine_split_insns (parallel, i3);
m_split_insn = combine_split_insns (parallel, i3);
}
if (m_split && NEXT_INSN (m_split) == NULL_RTX)
if (m_split_insn && NEXT_INSN (m_split_insn) == NULL_RTX)
{
m_split = PATTERN (m_split);
insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
rtx m_split_pat = PATTERN (m_split_insn);
insn_code_number = recog_for_combine (&m_split_pat, i3, &new_i3_notes);
if (insn_code_number >= 0)
newpat = m_split;
newpat = m_split_pat;
}
else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
else if (m_split_insn && NEXT_INSN (NEXT_INSN (m_split_insn)) == NULL_RTX
&& (next_nonnote_nondebug_insn (i2) == i3
|| ! use_crosses_set_p (PATTERN (m_split), DF_INSN_LUID (i2))))
|| ! use_crosses_set_p (PATTERN (m_split_insn), DF_INSN_LUID (i2))))
{
rtx i2set, i3set;
rtx newi3pat = PATTERN (NEXT_INSN (m_split));
newi2pat = PATTERN (m_split);
rtx newi3pat = PATTERN (NEXT_INSN (m_split_insn));
newi2pat = PATTERN (m_split_insn);
i3set = single_set (NEXT_INSN (m_split));
i2set = single_set (m_split);
i3set = single_set (NEXT_INSN (m_split_insn));
i2set = single_set (m_split_insn);
i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
@ -4534,9 +4536,9 @@ find_split_point (rtx *loc, rtx_insn *insn, bool set_src)
MEM_ADDR_SPACE (x)))
{
rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
XEXP (x, 0)),
subst_insn);
rtx_insn *seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
XEXP (x, 0)),
subst_insn);
/* This should have produced two insns, each of which sets our
placeholder. If the source of the second is a valid address,

gcc/config/spu/spu.c

@ -2664,8 +2664,8 @@ spu_machine_dependent_reorg (void)
label because GCC expects it at the beginning of the block. */
rtx unspec = SET_SRC (XVECEXP (PATTERN (insn), 0, 0));
rtx label_ref = XVECEXP (unspec, 0, 0);
rtx label = XEXP (label_ref, 0);
rtx branch;
rtx_insn *label = as_a <rtx_insn *> (XEXP (label_ref, 0));
rtx_insn *branch;
int offset = 0;
for (branch = NEXT_INSN (label);
!JUMP_P (branch) && !CALL_P (branch);

View File

@ -1733,7 +1733,7 @@
rtx t0_hi = gen_rtx_SUBREG (HImode, t0, 2);
rtx t1_hi = gen_rtx_SUBREG (HImode, t1, 2);
rtx insn = emit_insn (gen_lshrsi3 (t0, operands[1], GEN_INT (16)));
rtx_insn *insn = emit_insn (gen_lshrsi3 (t0, operands[1], GEN_INT (16)));
emit_insn (gen_lshrsi3 (t1, operands[2], GEN_INT (16)));
emit_insn (gen_umulhisi3 (t2, op1_hi, op2_hi));
emit_insn (gen_mpyh_si (t3, operands[1], operands[2]));
@ -1794,7 +1794,7 @@
rtx op2_hi = gen_rtx_SUBREG (HImode, operands[2], 2);
rtx t0_hi = gen_rtx_SUBREG (HImode, t0, 2);
rtx insn = emit_insn (gen_rotlsi3 (t0, operands[2], GEN_INT (16)));
rtx_insn *insn = emit_insn (gen_rotlsi3 (t0, operands[2], GEN_INT (16)));
emit_insn (gen_umulhisi3 (t1, op1_hi, op2_hi));
emit_insn (gen_umulhisi3 (t2, op1_hi, t0_hi));
emit_insn (gen_mpyhhu_si (t3, operands[1], t0));

gcc/dse.c

@ -812,7 +812,7 @@ free_store_info (insn_info_t insn_info)
typedef struct
{
rtx first, current;
rtx_insn *first, *current;
regset fixed_regs_live;
bool failure;
} note_add_store_info;
@ -823,7 +823,7 @@ typedef struct
static void
note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
{
rtx insn;
rtx_insn *insn;
note_add_store_info *info = (note_add_store_info *) data;
int r, n;
@ -864,7 +864,7 @@ emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
rtx dest, rtx src, rtx srcoff, void *arg)
{
insn_info_t insn_info = (insn_info_t) arg;
rtx insn = insn_info->insn, new_insn, cur;
rtx_insn *insn = insn_info->insn, *new_insn, *cur;
note_add_store_info info;
/* We can reuse all operands without copying, because we are about
@ -877,7 +877,7 @@ emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
end_sequence ();
}
else
new_insn = gen_move_insn (dest, src);
new_insn = as_a <rtx_insn *> (gen_move_insn (dest, src));
info.first = new_insn;
info.fixed_regs_live = insn_info->fixed_regs_live;
info.failure = false;
@ -1742,7 +1742,8 @@ find_shift_sequence (int access_size,
GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
new_mode = GET_MODE_WIDER_MODE (new_mode))
{
rtx target, new_reg, shift_seq, insn, new_lhs;
rtx target, new_reg, new_lhs;
rtx_insn *shift_seq, *insn;
int cost;
/* If a constant was stored into memory, try to simplify it here,
@ -1962,7 +1963,8 @@ replace_read (store_info_t store_info, insn_info_t store_insn,
{
enum machine_mode store_mode = GET_MODE (store_info->mem);
enum machine_mode read_mode = GET_MODE (read_info->mem);
rtx insns, this_insn, read_reg;
rtx_insn *insns, *this_insn;
rtx read_reg;
basic_block bb;
if (!dbg_cnt (dse))

gcc/dwarf2cfi.c

@ -113,7 +113,7 @@ typedef struct
HOST_WIDE_INT beg_delay_args_size, end_delay_args_size;
/* The first EH insn in the trace, where beg_delay_args_size must be set. */
rtx eh_head;
rtx_insn *eh_head;
/* The following variables contain data used in interpreting frame related
expressions. These are not part of the "real" row state as defined by
@ -876,7 +876,7 @@ notice_args_size (rtx insn)
data within the trace related to EH insns and args_size. */
static void
notice_eh_throw (rtx insn)
notice_eh_throw (rtx_insn *insn)
{
HOST_WIDE_INT args_size;
@ -2577,10 +2577,10 @@ create_cfi_notes (void)
/* Return the insn before the first NOTE_INSN_CFI after START. */
static rtx
before_next_cfi_note (rtx start)
static rtx_insn *
before_next_cfi_note (rtx_insn *start)
{
rtx prev = start;
rtx_insn *prev = start;
while (start)
{
if (NOTE_P (start) && NOTE_KIND (start) == NOTE_INSN_CFI)
@ -2675,7 +2675,7 @@ connect_traces (void)
if (dump_file && add_cfi_insn != ti->head)
{
rtx note;
rtx_insn *note;
fprintf (dump_file, "Fixup between trace %u and %u:\n",
prev_ti->id, ti->id);

gcc/emit-rtl.c

@ -2747,7 +2747,7 @@ reset_insn_used_flags (rtx insn)
static void
reset_all_used_flags (void)
{
rtx p;
rtx_insn *p;
for (p = get_insns (); p; p = NEXT_INSN (p))
if (INSN_P (p))
@ -2786,7 +2786,7 @@ verify_insn_sharing (rtx insn)
DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
rtx p;
rtx_insn *p;
timevar_push (TV_VERIFY_RTL_SHARING);
@ -2816,7 +2816,7 @@ verify_rtl_sharing (void)
Assumes the mark bits are cleared at entry. */
void
unshare_all_rtl_in_chain (rtx insn)
unshare_all_rtl_in_chain (rtx_insn *insn)
{
for (; insn; insn = NEXT_INSN (insn))
if (INSN_P (insn))
@ -3144,7 +3144,7 @@ get_last_insn_anywhere (void)
rtx
get_first_nonnote_insn (void)
{
rtx insn = get_insns ();
rtx_insn *insn = get_insns ();
if (insn)
{
@ -3157,7 +3157,7 @@ get_first_nonnote_insn (void)
{
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
}
}
@ -3170,7 +3170,7 @@ get_first_nonnote_insn (void)
rtx
get_last_nonnote_insn (void)
{
rtx insn = get_last_insn ();
rtx_insn *insn = get_last_insn ();
if (insn)
{
@ -3181,10 +3181,9 @@ get_last_nonnote_insn (void)
continue;
else
{
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0,
XVECLEN (PATTERN (insn), 0) - 1);
if (NONJUMP_INSN_P (insn))
if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
insn = seq->insn (seq->len () - 1);
}
}
@ -3216,42 +3215,45 @@ get_max_insn_count (void)
of the sequence. */
rtx_insn *
next_insn (rtx insn)
next_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
if (insn)
{
insn = NEXT_INSN (insn);
if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the previous insn. If it is a SEQUENCE, return the last insn
of the sequence. */
rtx_insn *
previous_insn (rtx insn)
previous_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
if (insn)
{
insn = PREV_INSN (insn);
if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
if (insn && NONJUMP_INSN_P (insn))
if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
insn = seq->insn (seq->len () - 1);
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the next insn after INSN that is not a NOTE. This routine does not
look inside SEQUENCEs. */
rtx_insn *
next_nonnote_insn (rtx insn)
next_nonnote_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = NEXT_INSN (insn);
@ -3259,7 +3261,7 @@ next_nonnote_insn (rtx insn)
break;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the next insn after INSN that is not a NOTE, but stop the
@ -3267,8 +3269,10 @@ next_nonnote_insn (rtx insn)
look inside SEQUENCEs. */
rtx_insn *
next_nonnote_insn_bb (rtx insn)
next_nonnote_insn_bb (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = NEXT_INSN (insn);
@ -3278,15 +3282,17 @@ next_nonnote_insn_bb (rtx insn)
return NULL;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the previous insn before INSN that is not a NOTE. This routine does
not look inside SEQUENCEs. */
rtx_insn *
prev_nonnote_insn (rtx insn)
prev_nonnote_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = PREV_INSN (insn);
@ -3294,7 +3300,7 @@ prev_nonnote_insn (rtx insn)
break;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the previous insn before INSN that is not a NOTE, but stop
@ -3302,8 +3308,10 @@ prev_nonnote_insn (rtx insn)
not look inside SEQUENCEs. */
rtx_insn *
prev_nonnote_insn_bb (rtx insn)
prev_nonnote_insn_bb (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = PREV_INSN (insn);
@ -3313,15 +3321,17 @@ prev_nonnote_insn_bb (rtx insn)
return NULL;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the next insn after INSN that is not a DEBUG_INSN. This
routine does not look inside SEQUENCEs. */
rtx_insn *
next_nondebug_insn (rtx insn)
next_nondebug_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = NEXT_INSN (insn);
@ -3329,15 +3339,17 @@ next_nondebug_insn (rtx insn)
break;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the previous insn before INSN that is not a DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
rtx_insn *
prev_nondebug_insn (rtx insn)
prev_nondebug_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = PREV_INSN (insn);
@ -3345,15 +3357,17 @@ prev_nondebug_insn (rtx insn)
break;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
rtx_insn *
next_nonnote_nondebug_insn (rtx insn)
next_nonnote_nondebug_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = NEXT_INSN (insn);
@ -3361,15 +3375,17 @@ next_nonnote_nondebug_insn (rtx insn)
break;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
This routine does not look inside SEQUENCEs. */
rtx_insn *
prev_nonnote_nondebug_insn (rtx insn)
prev_nonnote_nondebug_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = PREV_INSN (insn);
@ -3377,7 +3393,7 @@ prev_nonnote_nondebug_insn (rtx insn)
break;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
@ -3385,8 +3401,10 @@ prev_nonnote_nondebug_insn (rtx insn)
SEQUENCEs. */
rtx_insn *
next_real_insn (rtx insn)
next_real_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = NEXT_INSN (insn);
@ -3394,7 +3412,7 @@ next_real_insn (rtx insn)
break;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
@ -3402,8 +3420,10 @@ next_real_insn (rtx insn)
SEQUENCEs. */
rtx_insn *
prev_real_insn (rtx insn)
prev_real_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = PREV_INSN (insn);
@ -3411,7 +3431,7 @@ prev_real_insn (rtx insn)
break;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Return the last CALL_INSN in the current list, or 0 if there is none.
@ -3446,8 +3466,10 @@ active_insn_p (const_rtx insn)
}
rtx_insn *
next_active_insn (rtx insn)
next_active_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = NEXT_INSN (insn);
@ -3455,7 +3477,7 @@ next_active_insn (rtx insn)
break;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
/* Find the last insn before INSN that really does something. This routine
@ -3463,8 +3485,10 @@ next_active_insn (rtx insn)
standalone USE and CLOBBER insn. */
rtx_insn *
prev_active_insn (rtx insn)
prev_active_insn (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
while (insn)
{
insn = PREV_INSN (insn);
@ -3472,7 +3496,7 @@ prev_active_insn (rtx insn)
break;
}
return safe_as_a <rtx_insn *> (insn);
return insn;
}
#ifdef HAVE_cc0
@ -3486,8 +3510,10 @@ prev_active_insn (rtx insn)
Return 0 if we can't find the insn. */
rtx_insn *
next_cc0_user (rtx insn)
next_cc0_user (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
if (note)
@ -3495,10 +3521,10 @@ next_cc0_user (rtx insn)
insn = next_nonnote_insn (insn);
if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
return safe_as_a <rtx_insn *> (insn);
return insn;
return 0;
}
@ -3507,8 +3533,10 @@ next_cc0_user (rtx insn)
note, it is the previous insn. */
rtx_insn *
prev_cc0_setter (rtx insn)
prev_cc0_setter (rtx uncast_insn)
{
rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
if (note)
@ -3517,7 +3545,7 @@ prev_cc0_setter (rtx insn)
insn = prev_nonnote_insn (insn);
gcc_assert (sets_cc0_p (PATTERN (insn)));
return safe_as_a <rtx_insn *> (insn);
return insn;
}
#endif
@ -3574,27 +3602,29 @@ mark_label_nuses (rtx x)
returns TRIAL. If the insn to be returned can be split, it will be. */
rtx_insn *
try_split (rtx pat, rtx trial, int last)
try_split (rtx pat, rtx uncast_trial, int last)
{
rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
rtx_insn *before = PREV_INSN (trial);
rtx_insn *after = NEXT_INSN (trial);
int has_barrier = 0;
rtx note, seq, tem;
rtx note;
rtx_insn *seq, *tem;
int probability;
rtx insn_last, insn;
rtx_insn *insn_last, *insn;
int njumps = 0;
rtx call_insn = NULL_RTX;
/* We're not good at redistributing frame information. */
if (RTX_FRAME_RELATED_P (trial))
return as_a <rtx_insn *> (trial);
return trial;
if (any_condjump_p (trial)
&& (note = find_reg_note (trial, REG_BR_PROB, 0)))
split_branch_probability = XINT (note, 0);
probability = split_branch_probability;
seq = split_insns (pat, trial);
seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));
split_branch_probability = -1;
@ -3607,7 +3637,7 @@ try_split (rtx pat, rtx trial, int last)
}
if (!seq)
return as_a <rtx_insn *> (trial);
return trial;
/* Avoid infinite loop if any insn of the result matches
the original pattern. */
@ -3616,7 +3646,7 @@ try_split (rtx pat, rtx trial, int last)
{
if (INSN_P (insn_last)
&& rtx_equal_p (PATTERN (insn_last), pat))
return as_a <rtx_insn *> (trial);
return trial;
if (!NEXT_INSN (insn_last))
break;
insn_last = NEXT_INSN (insn_last);
@ -3656,7 +3686,8 @@ try_split (rtx pat, rtx trial, int last)
for (insn = insn_last; insn ; insn = PREV_INSN (insn))
if (CALL_P (insn))
{
rtx next, *p;
rtx_insn *next;
rtx *p;
gcc_assert (call_insn == NULL_RTX);
call_insn = insn;
@ -4090,8 +4121,9 @@ set_insn_deleted (rtx insn)
To really delete an insn and related DF information, use delete_insn. */
void
remove_insn (rtx insn)
remove_insn (rtx uncast_insn)
{
rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
rtx_insn *next = NEXT_INSN (insn);
rtx_insn *prev = PREV_INSN (insn);
basic_block bb;
@ -4627,9 +4659,10 @@ emit_note_before (enum insn_note subtype, rtx uncast_before)
MAKE_RAW indicates how to turn PATTERN into a real insn. */
static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
rtx_insn *(*make_raw) (rtx))
{
rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
if (pattern == NULL_RTX || !loc)
@ -4652,10 +4685,11 @@ emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
any DEBUG_INSNs. */
static rtx_insn *
emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
rtx_insn *(*make_raw) (rtx))
{
rtx prev = after;
rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
rtx_insn *prev = after;
if (skip_debug_insns)
while (DEBUG_INSN_P (prev))
@ -4730,16 +4764,17 @@ emit_debug_insn_after (rtx pattern, rtx after)
CALL_INSN, etc. */
static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
rtx_insn *(*make_raw) (rtx))
{
rtx first = PREV_INSN (before);
rtx last = emit_pattern_before_noloc (pattern, before,
insnp ? before : NULL_RTX,
NULL, make_raw);
rtx_insn *before = as_a <rtx_insn *> (uncast_before);
rtx_insn *first = PREV_INSN (before);
rtx_insn *last = emit_pattern_before_noloc (pattern, before,
insnp ? before : NULL_RTX,
NULL, make_raw);
if (pattern == NULL_RTX || !loc)
return safe_as_a <rtx_insn *> (last);
return last;
if (!first)
first = get_insns ();
@ -4753,7 +4788,7 @@ emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
break;
first = NEXT_INSN (first);
}
return safe_as_a <rtx_insn *> (last);
return last;
}
/* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
@ -4762,10 +4797,11 @@ emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
static rtx_insn *
emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
bool insnp, rtx_insn *(*make_raw) (rtx))
{
rtx next = before;
rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
rtx_insn *next = before;
if (skip_debug_insns)
while (DEBUG_INSN_P (next))

gcc/except.c

@ -1741,9 +1741,10 @@ insn_could_throw_p (const_rtx insn)
to look for a note, or the note itself. */
void
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
{
rtx insn, note = note_or_insn;
rtx_insn *insn;
rtx note = note_or_insn;
if (INSN_P (note_or_insn))
{
@ -1762,9 +1763,10 @@ copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
/* Likewise, but iterate backward. */
void
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first)
copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
{
rtx insn, note = note_or_insn;
rtx_insn *insn;
rtx note = note_or_insn;
if (INSN_P (note_or_insn))
{

gcc/expr.c

@ -3933,11 +3933,12 @@ find_args_size_adjust (rtx insn)
}
int
fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
fixup_args_size_notes (rtx prev, rtx uncast_last, int end_args_size)
{
rtx_insn *last = safe_as_a <rtx_insn *> (uncast_last);
int args_size = end_args_size;
bool saw_unknown = false;
rtx insn;
rtx_insn *insn;
for (insn = last; insn != prev; insn = PREV_INSN (insn))
{

gcc/function.c

@ -122,7 +122,7 @@ static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
can always export `prologue_epilogue_contains'. */
static void record_insns (rtx, rtx, htab_t *) ATTRIBUTE_UNUSED;
static void record_insns (rtx_insn *, rtx, htab_t *) ATTRIBUTE_UNUSED;
static bool contains (const_rtx, htab_t);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
@ -4982,9 +4982,9 @@ do_warn_unused_parameter (tree fn)
/* Set the location of the insn chain starting at INSN to LOC. */
static void
set_insn_locations (rtx insn, int loc)
set_insn_locations (rtx_insn *insn, int loc)
{
while (insn != NULL_RTX)
while (insn != NULL)
{
if (INSN_P (insn))
INSN_LOCATION (insn) = loc;
@ -5284,9 +5284,9 @@ get_arg_pointer_save_area (void)
for the first time. */
static void
record_insns (rtx insns, rtx end, htab_t *hashp)
record_insns (rtx_insn *insns, rtx end, htab_t *hashp)
{
rtx tmp;
rtx_insn *tmp;
htab_t hash = *hashp;
if (hash == NULL)
@ -5424,8 +5424,9 @@ set_return_jump_label (rtx returnjump)
#if defined (HAVE_return) || defined (HAVE_simple_return)
/* Return true if there are any active insns between HEAD and TAIL. */
bool
active_insn_between (rtx head, rtx tail)
active_insn_between (rtx head, rtx uncast_tail)
{
rtx_insn *tail = safe_as_a <rtx_insn *> (uncast_tail);
while (tail)
{
if (active_insn_p (tail))
@ -5615,9 +5616,8 @@ thread_prologue_and_epilogue_insns (void)
bitmap_head bb_flags;
#endif
rtx_insn *returnjump;
rtx seq ATTRIBUTE_UNUSED;
rtx_insn *epilogue_end ATTRIBUTE_UNUSED;
rtx prologue_seq ATTRIBUTE_UNUSED, split_prologue_seq ATTRIBUTE_UNUSED;
rtx_insn *prologue_seq ATTRIBUTE_UNUSED, *split_prologue_seq ATTRIBUTE_UNUSED;
edge e, entry_edge, orig_entry_edge, exit_fallthru_edge;
edge_iterator ei;
@ -5626,7 +5626,6 @@ thread_prologue_and_epilogue_insns (void)
rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
inserted = false;
seq = NULL_RTX;
epilogue_end = NULL;
returnjump = NULL;
@ -5637,7 +5636,7 @@ thread_prologue_and_epilogue_insns (void)
entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
orig_entry_edge = entry_edge;
split_prologue_seq = NULL_RTX;
split_prologue_seq = NULL;
if (flag_split_stack
&& (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl))
== NULL))
@ -5657,12 +5656,12 @@ thread_prologue_and_epilogue_insns (void)
#endif
}
prologue_seq = NULL_RTX;
prologue_seq = NULL;
#ifdef HAVE_prologue
if (HAVE_prologue)
{
start_sequence ();
seq = gen_prologue ();
rtx_insn *seq = safe_as_a <rtx_insn *> (gen_prologue ());
emit_insn (seq);
/* Insert an explicit USE for the frame pointer
@ -5799,7 +5798,7 @@ thread_prologue_and_epilogue_insns (void)
{
start_sequence ();
epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
seq = gen_epilogue ();
rtx_insn *seq = as_a <rtx_insn *> (gen_epilogue ());
if (seq)
emit_jump_insn (seq);
@ -5900,7 +5899,7 @@ epilogue_done:
start_sequence ();
emit_note (NOTE_INSN_EPILOGUE_BEG);
emit_insn (ep_seq);
seq = get_insns ();
rtx_insn *seq = get_insns ();
end_sequence ();
/* Retain a map of the epilogue insns. Used in life analysis to

gcc/gcse.c

@ -2161,7 +2161,7 @@ process_insert_insn (struct expr *expr)
static void
insert_insn_end_basic_block (struct expr *expr, basic_block bb)
{
rtx insn = BB_END (bb);
rtx_insn *insn = BB_END (bb);
rtx_insn *new_insn;
rtx reg = expr->reaching_reg;
int regno = REGNO (reg);
@ -2188,7 +2188,7 @@ insert_insn_end_basic_block (struct expr *expr, basic_block bb)
if cc0 isn't set. */
rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
if (note)
insn = XEXP (note, 0);
insn = safe_as_a <rtx_insn *> (XEXP (note, 0));
else
{
rtx_insn *maybe_cc0_setter = prev_nonnote_insn (insn);

gcc/haifa-sched.c

@ -1037,7 +1037,7 @@ static void
initiate_bb_reg_pressure_info (basic_block bb)
{
unsigned int i ATTRIBUTE_UNUSED;
rtx insn;
rtx_insn *insn;
if (current_nr_blocks > 1)
FOR_BB_INSNS (bb, insn)
@ -1604,7 +1604,7 @@ priority (rtx_insn *insn)
this_priority = insn_cost (insn);
else
{
rtx prev_first, twin;
rtx_insn *prev_first, *twin;
basic_block rec;
/* For recovery check instructions we calculate priority slightly
@ -3049,7 +3049,7 @@ update_register_pressure (rtx_insn *insn)
meaning in sched-int.h::_haifa_insn_data) for all current BB insns
after insn AFTER. */
static void
setup_insn_max_reg_pressure (rtx after, bool update_p)
setup_insn_max_reg_pressure (rtx_insn *after, bool update_p)
{
int i, p;
bool eq_p;
@ -3112,7 +3112,7 @@ update_reg_and_insn_max_reg_pressure (rtx_insn *insn)
insns starting after insn AFTER. Set up also max register pressure
for all insns of the basic block. */
void
sched_setup_bb_reg_pressure_info (basic_block bb, rtx after)
sched_setup_bb_reg_pressure_info (basic_block bb, rtx_insn *after)
{
gcc_assert (sched_pressure == SCHED_PRESSURE_WEIGHTED);
initiate_bb_reg_pressure_info (bb);
@ -4832,7 +4832,7 @@ get_ebb_head_tail (basic_block beg, basic_block end,
/* Return nonzero if there are no real insns in the range [ HEAD, TAIL ]. */
int
no_real_insns_p (const_rtx head, const_rtx tail)
no_real_insns_p (const rtx_insn *head, const rtx_insn *tail)
{
while (head != NEXT_INSN (tail))
{
@ -5975,7 +5975,7 @@ schedule_block (basic_block *target_bb, state_t init_state)
/* Head/tail info for this block. */
rtx_insn *prev_head = current_sched_info->prev_head;
rtx next_tail = current_sched_info->next_tail;
rtx_insn *next_tail = current_sched_info->next_tail;
rtx_insn *head = NEXT_INSN (prev_head);
rtx_insn *tail = PREV_INSN (next_tail);

gcc/ifcvt.c

@ -87,10 +87,11 @@ static int count_bb_insns (const_basic_block);
static bool cheap_bb_rtx_cost_p (const_basic_block, int, int);
static rtx_insn *first_active_insn (basic_block);
static rtx_insn *last_active_insn (basic_block, int);
static rtx find_active_insn_before (basic_block, rtx);
static rtx find_active_insn_after (basic_block, rtx);
static rtx_insn *find_active_insn_before (basic_block, rtx_insn *);
static rtx_insn *find_active_insn_after (basic_block, rtx_insn *);
static basic_block block_fallthru (basic_block);
static int cond_exec_process_insns (ce_if_block *, rtx, rtx, rtx, int, int);
static int cond_exec_process_insns (ce_if_block *, rtx_insn *, rtx, rtx, int,
int);
static rtx cond_exec_get_condition (rtx);
static rtx noce_get_condition (rtx_insn *, rtx_insn **, bool);
static int noce_operand_ok (const_rtx);
@ -256,11 +257,11 @@ last_active_insn (basic_block bb, int skip_use_p)
/* Return the active insn before INSN inside basic block CURR_BB. */
static rtx
find_active_insn_before (basic_block curr_bb, rtx insn)
static rtx_insn *
find_active_insn_before (basic_block curr_bb, rtx_insn *insn)
{
if (!insn || insn == BB_HEAD (curr_bb))
return NULL_RTX;
return NULL;
while ((insn = PREV_INSN (insn)) != NULL_RTX)
{
@ -269,7 +270,7 @@ find_active_insn_before (basic_block curr_bb, rtx insn)
/* No other active insn all the way to the start of the basic block. */
if (insn == BB_HEAD (curr_bb))
return NULL_RTX;
return NULL;
}
return insn;
@ -277,11 +278,11 @@ find_active_insn_before (basic_block curr_bb, rtx insn)
/* Return the active insn after INSN inside basic block CURR_BB. */
static rtx
find_active_insn_after (basic_block curr_bb, rtx insn)
static rtx_insn *
find_active_insn_after (basic_block curr_bb, rtx_insn *insn)
{
if (!insn || insn == BB_END (curr_bb))
return NULL_RTX;
return NULL;
while ((insn = NEXT_INSN (insn)) != NULL_RTX)
{
@ -290,7 +291,7 @@ find_active_insn_after (basic_block curr_bb, rtx insn)
/* No other active insn all the way to the end of the basic block. */
if (insn == BB_END (curr_bb))
return NULL_RTX;
return NULL;
}
return insn;
@ -334,14 +335,14 @@ rtx_interchangeable_p (const_rtx a, const_rtx b)
static int
cond_exec_process_insns (ce_if_block *ce_info ATTRIBUTE_UNUSED,
/* if block information */rtx start,
/* if block information */rtx_insn *start,
/* first insn to look at */rtx end,
/* last insn to look at */rtx test,
/* conditional execution test */int prob_val,
/* probability of branch taken. */int mod_ok)
{
int must_be_last = FALSE;
rtx insn;
rtx_insn *insn;
rtx xtest;
rtx pattern;
@ -466,10 +467,10 @@ cond_exec_process_if_block (ce_if_block * ce_info,
basic_block then_bb = ce_info->then_bb; /* THEN */
basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
rtx test_expr; /* expression in IF_THEN_ELSE that is tested */
rtx then_start; /* first insn in THEN block */
rtx then_end; /* last insn + 1 in THEN block */
rtx else_start = NULL_RTX; /* first insn in ELSE block or NULL */
rtx else_end = NULL_RTX; /* last insn + 1 in ELSE block */
rtx_insn *then_start; /* first insn in THEN block */
rtx_insn *then_end; /* last insn + 1 in THEN block */
rtx_insn *else_start = NULL; /* first insn in ELSE block or NULL */
rtx_insn *else_end = NULL; /* last insn + 1 in ELSE block */
int max; /* max # of insns to convert. */
int then_mod_ok; /* whether conditional mods are ok in THEN */
rtx true_expr; /* test for else block insns */
@ -534,9 +535,9 @@ cond_exec_process_if_block (ce_if_block * ce_info,
&then_first_tail, &else_first_tail,
NULL);
if (then_first_tail == BB_HEAD (then_bb))
then_start = then_end = NULL_RTX;
then_start = then_end = NULL;
if (else_first_tail == BB_HEAD (else_bb))
else_start = else_end = NULL_RTX;
else_start = else_end = NULL;
if (n_matching > 0)
{
@ -562,7 +563,7 @@ cond_exec_process_if_block (ce_if_block * ce_info,
if (n_matching > 0)
{
rtx insn;
rtx_insn *insn;
/* We won't pass the insns in the head sequence to
cond_exec_process_insns, so we need to test them here
@ -577,9 +578,9 @@ cond_exec_process_if_block (ce_if_block * ce_info,
}
if (then_last_head == then_end)
then_start = then_end = NULL_RTX;
then_start = then_end = NULL;
if (else_last_head == else_end)
else_start = else_end = NULL_RTX;
else_start = else_end = NULL;
if (n_matching > 0)
{
@ -641,7 +642,7 @@ cond_exec_process_if_block (ce_if_block * ce_info,
do
{
rtx start, end;
rtx_insn *start, *end;
rtx t, f;
enum rtx_code f_code;
@ -743,7 +744,7 @@ cond_exec_process_if_block (ce_if_block * ce_info,
that the remaining one is executed first for both branches. */
if (then_first_tail)
{
rtx from = then_first_tail;
rtx_insn *from = then_first_tail;
if (!INSN_P (from))
from = find_active_insn_after (then_bb, from);
delete_insn_chain (from, BB_END (then_bb), false);
@ -2499,7 +2500,7 @@ noce_process_if_block (struct noce_if_info *if_info)
basic_block then_bb = if_info->then_bb; /* THEN */
basic_block else_bb = if_info->else_bb; /* ELSE or NULL */
basic_block join_bb = if_info->join_bb; /* JOIN */
rtx jump = if_info->jump;
rtx_insn *jump = if_info->jump;
rtx cond = if_info->cond;
rtx_insn *insn_a, *insn_b;
rtx set_a, set_b;
@ -3201,7 +3202,7 @@ merge_if_block (struct ce_if_block * ce_info)
if (EDGE_COUNT (then_bb->succs) == 0
&& EDGE_COUNT (combo_bb->succs) > 1)
{
rtx end = NEXT_INSN (BB_END (then_bb));
rtx_insn *end = NEXT_INSN (BB_END (then_bb));
while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
end = NEXT_INSN (end);
@ -3224,7 +3225,7 @@ merge_if_block (struct ce_if_block * ce_info)
if (EDGE_COUNT (else_bb->succs) == 0
&& EDGE_COUNT (combo_bb->succs) > 1)
{
rtx end = NEXT_INSN (BB_END (else_bb));
rtx_insn *end = NEXT_INSN (BB_END (else_bb));
while (end && NOTE_P (end) && !NOTE_INSN_BASIC_BLOCK_P (end))
end = NEXT_INSN (end);
@ -3568,7 +3569,7 @@ cond_exec_find_if_block (struct ce_if_block * ce_info)
{
if (single_pred_p (else_bb) && else_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
{
rtx last_insn = BB_END (then_bb);
rtx_insn *last_insn = BB_END (then_bb);
while (last_insn
&& NOTE_P (last_insn)

gcc/jump.c

@ -1247,8 +1247,9 @@ mark_jump_label_asm (rtx asmop, rtx insn)
subsequent cfg_cleanup pass to delete unreachable code if needed. */
rtx_insn *
delete_related_insns (rtx insn)
delete_related_insns (rtx uncast_insn)
{
rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
int was_code_label = (LABEL_P (insn));
rtx note;
rtx_insn *next = NEXT_INSN (insn), *prev = PREV_INSN (insn);
@ -1276,7 +1277,7 @@ delete_related_insns (rtx insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE
&& CALL_P (XVECEXP (PATTERN (insn), 0, 0))))
{
rtx p;
rtx_insn *p;
for (p = next && INSN_DELETED_P (next) ? NEXT_INSN (next) : next;
p && NOTE_P (p);

gcc/lra-constraints.c

@ -4544,7 +4544,7 @@ inherit_reload_reg (bool def_p, int original_regno,
" Rejecting inheritance %d->%d "
"as it results in 2 or more insns:\n",
original_regno, REGNO (new_reg));
dump_rtl_slim (lra_dump_file, new_insns, NULL_RTX, -1, 0);
dump_rtl_slim (lra_dump_file, new_insns, NULL, -1, 0);
fprintf (lra_dump_file,
" >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>\n");
}
@ -4809,7 +4809,7 @@ split_reg (bool before_p, int original_regno, rtx_insn *insn,
(lra_dump_file,
" Rejecting split %d->%d resulting in > 2 %s save insns:\n",
original_regno, REGNO (new_reg), call_save_p ? "call" : "");
dump_rtl_slim (lra_dump_file, save, NULL_RTX, -1, 0);
dump_rtl_slim (lra_dump_file, save, NULL, -1, 0);
fprintf (lra_dump_file,
" ))))))))))))))))))))))))))))))))))))))))))))))))\n");
}
@ -4825,7 +4825,7 @@ split_reg (bool before_p, int original_regno, rtx_insn *insn,
" Rejecting split %d->%d "
"resulting in > 2 %s restore insns:\n",
original_regno, REGNO (new_reg), call_save_p ? "call" : "");
dump_rtl_slim (lra_dump_file, restore, NULL_RTX, -1, 0);
dump_rtl_slim (lra_dump_file, restore, NULL, -1, 0);
fprintf (lra_dump_file,
" ))))))))))))))))))))))))))))))))))))))))))))))))\n");
}

gcc/lra.c

@ -1737,12 +1737,12 @@ lra_process_new_insns (rtx_insn *insn, rtx_insn *before, rtx_insn *after,
if (before != NULL_RTX)
{
fprintf (lra_dump_file," %s before:\n", title);
dump_rtl_slim (lra_dump_file, before, NULL_RTX, -1, 0);
dump_rtl_slim (lra_dump_file, before, NULL, -1, 0);
}
if (after != NULL_RTX)
{
fprintf (lra_dump_file, " %s after:\n", title);
dump_rtl_slim (lra_dump_file, after, NULL_RTX, -1, 0);
dump_rtl_slim (lra_dump_file, after, NULL, -1, 0);
}
fprintf (lra_dump_file, "\n");
}

gcc/modulo-sched.c

@ -212,7 +212,7 @@ static int compute_split_row (sbitmap, int, int, int, ddg_node_ptr);
static int sms_order_nodes (ddg_ptr, int, int *, int *);
static void set_node_sched_params (ddg_ptr);
static partial_schedule_ptr sms_schedule_by_order (ddg_ptr, int, int, int *);
static void permute_partial_schedule (partial_schedule_ptr, rtx);
static void permute_partial_schedule (partial_schedule_ptr, rtx_insn *);
static void generate_prolog_epilog (partial_schedule_ptr, struct loop *,
rtx, rtx);
static int calculate_stage_count (partial_schedule_ptr, int);
@ -876,7 +876,7 @@ reset_sched_times (partial_schedule_ptr ps, int amount)
row ii-1, and position them right before LAST. This schedules
the insns of the loop kernel. */
static void
permute_partial_schedule (partial_schedule_ptr ps, rtx last)
permute_partial_schedule (partial_schedule_ptr ps, rtx_insn *last)
{
int ii = ps->ii;
int row;


@@ -177,9 +177,10 @@ optab_libfunc (optab optab, enum machine_mode mode)
try again, ensuring that TARGET is not one of the operands. */
static int
add_equal_note (rtx insns, rtx target, enum rtx_code code, rtx op0, rtx op1)
add_equal_note (rtx_insn *insns, rtx target, enum rtx_code code, rtx op0, rtx op1)
{
rtx last_insn, set;
rtx_insn *last_insn;
rtx set;
rtx note;
gcc_assert (insns && INSN_P (insns) && NEXT_INSN (insns));
@@ -1505,8 +1506,9 @@ expand_binop_directly (enum machine_mode mode, optab binoptab,
/* If PAT is composed of more than one insn, try to add an appropriate
REG_EQUAL note to it. If we can't because TEMP conflicts with an
operand, call expand_binop again, this time without a target. */
if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX
&& ! add_equal_note (pat, ops[0].value, optab_to_code (binoptab),
if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
&& ! add_equal_note (as_a <rtx_insn *> (pat), ops[0].value,
optab_to_code (binoptab),
ops[1].value, ops[2].value))
{
delete_insns_since (last);
@@ -3028,8 +3030,9 @@ expand_unop_direct (enum machine_mode mode, optab unoptab, rtx op0, rtx target,
pat = maybe_gen_insn (icode, 2, ops);
if (pat)
{
if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX
&& ! add_equal_note (pat, ops[0].value, optab_to_code (unoptab),
if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
&& ! add_equal_note (as_a <rtx_insn *> (pat), ops[0].value,
optab_to_code (unoptab),
ops[1].value, NULL_RTX))
{
delete_insns_since (last);
@@ -3829,8 +3832,10 @@ maybe_emit_unop_insn (enum insn_code icode, rtx target, rtx op0,
if (!pat)
return false;
if (INSN_P (pat) && NEXT_INSN (pat) != NULL_RTX && code != UNKNOWN)
add_equal_note (pat, ops[0].value, code, ops[1].value, NULL_RTX);
if (INSN_P (pat) && NEXT_INSN (as_a <rtx_insn *> (pat)) != NULL_RTX
&& code != UNKNOWN)
add_equal_note (as_a <rtx_insn *> (pat), ops[0].value, code, ops[1].value,
NULL_RTX);
emit_insn (pat);


@@ -3164,7 +3164,8 @@ static rtx
peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
{
int i;
rtx last, eh_note, as_note, before_try, x;
rtx_insn *last, *before_try, *x;
rtx eh_note, as_note;
rtx old_insn, new_insn;
bool was_call = false;


@@ -213,7 +213,7 @@ static rtx_insn *delete_from_delay_slot (rtx_insn *);
static void delete_scheduled_jump (rtx);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx_insn_list *optimize_skip (rtx);
static rtx_insn_list *optimize_skip (rtx_insn *);
#endif
static int get_jump_flags (rtx, rtx);
static int mostly_true_jump (rtx);
@@ -766,10 +766,10 @@ note_delay_statistics (int slots_filled, int index)
of delay slots required. */
static rtx_insn_list *
optimize_skip (rtx insn)
optimize_skip (rtx_insn *insn)
{
rtx_insn *trial = next_nonnote_insn (insn);
rtx next_trial = next_active_insn (trial);
rtx_insn *next_trial = next_active_insn (trial);
rtx_insn_list *delay_list = 0;
int flags;


@@ -79,10 +79,10 @@ static HARD_REG_SET pending_dead_regs;
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx, int);
static rtx next_insn_no_annul (rtx);
static rtx find_dead_or_set_registers (rtx, struct resources*,
rtx*, int, struct resources,
struct resources);
static rtx_insn *next_insn_no_annul (rtx_insn *);
static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
rtx_insn **, int, struct resources,
struct resources);
/* Utility function called from mark_target_live_regs via note_stores.
It deadens any CLOBBERed registers and livens any SET registers. */
@@ -163,8 +163,8 @@ find_basic_block (rtx insn, int search_limit)
/* Similar to next_insn, but ignores insns in the delay slots of
an annulled branch. */
static rtx
next_insn_no_annul (rtx insn)
static rtx_insn *
next_insn_no_annul (rtx_insn *insn)
{
if (insn)
{
@@ -187,7 +187,7 @@ next_insn_no_annul (rtx insn)
insn = NEXT_INSN (insn);
if (insn && NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
}
return insn;
@@ -308,7 +308,7 @@ mark_referenced_resources (rtx x, struct resources *res,
However, we may have moved some of the parameter loading insns
into the delay slot of this CALL. If so, the USE's for them
don't count and should be skipped. */
rtx_insn *insn = PREV_INSN (x);
rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
rtx_sequence *sequence = 0;
int seq_size = 0;
int i;
@@ -420,19 +420,19 @@
Stop after passing a few conditional jumps, and/or a small
number of unconditional branches. */
static rtx
find_dead_or_set_registers (rtx target, struct resources *res,
rtx *jump_target, int jump_count,
static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
rtx_insn **jump_target, int jump_count,
struct resources set, struct resources needed)
{
HARD_REG_SET scratch;
rtx insn, next;
rtx jump_insn = 0;
rtx_insn *insn, *next;
rtx_insn *jump_insn = 0;
int i;
for (insn = target; insn; insn = next)
{
rtx this_jump_insn = insn;
rtx_insn *this_jump_insn = insn;
next = NEXT_INSN (insn);
@@ -480,7 +480,7 @@ find_dead_or_set_registers (rtx target, struct resources *res,
of a call, so search for a JUMP_INSN in any position. */
for (i = 0; i < seq->len (); i++)
{
this_jump_insn = seq->element (i);
this_jump_insn = seq->insn (i);
if (JUMP_P (this_jump_insn))
break;
}
@@ -497,14 +497,14 @@ find_dead_or_set_registers (rtx target, struct resources *res,
if (any_uncondjump_p (this_jump_insn)
|| ANY_RETURN_P (PATTERN (this_jump_insn)))
{
next = JUMP_LABEL (this_jump_insn);
next = JUMP_LABEL_AS_INSN (this_jump_insn);
if (ANY_RETURN_P (next))
next = NULL_RTX;
next = NULL;
if (jump_insn == 0)
{
jump_insn = insn;
if (jump_target)
*jump_target = JUMP_LABEL (this_jump_insn);
*jump_target = JUMP_LABEL_AS_INSN (this_jump_insn);
}
}
else if (any_condjump_p (this_jump_insn))
@@ -569,7 +569,7 @@ find_dead_or_set_registers (rtx target, struct resources *res,
AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
find_dead_or_set_registers (JUMP_LABEL_AS_INSN (this_jump_insn),
&target_res, 0, jump_count,
target_set, needed);
find_dead_or_set_registers (next,
@@ -880,14 +880,14 @@ return_insn_p (const_rtx insn)
init_resource_info () was invoked before we are called. */
void
mark_target_live_regs (rtx insns, rtx target, struct resources *res)
mark_target_live_regs (rtx_insn *insns, rtx_insn *target, struct resources *res)
{
int b = -1;
unsigned int i;
struct target_info *tinfo = NULL;
rtx insn;
rtx_insn *insn;
rtx jump_insn = 0;
rtx jump_target;
rtx_insn *jump_target;
HARD_REG_SET scratch;
struct resources set, needed;
@@ -965,7 +965,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
if (b != -1)
{
regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
rtx start_insn, stop_insn;
rtx_insn *start_insn, *stop_insn;
/* Compute hard regs live at start of block. */
REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
@@ -978,7 +978,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
if (NONJUMP_INSN_P (start_insn)
&& GET_CODE (PATTERN (start_insn)) == SEQUENCE)
start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);
if (NONJUMP_INSN_P (stop_insn)
&& GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
@@ -1122,7 +1122,7 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res)
if (jump_insn)
{
struct resources new_resources;
rtx stop_insn = next_active_insn (jump_insn);
rtx_insn *stop_insn = next_active_insn (jump_insn);
if (!ANY_RETURN_P (jump_target))
jump_target = next_active_insn (jump_target);

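Several of the hunks above replace raw XVECEXP (PATTERN (insn), 0, i) accesses with as_a <rtx_sequence *> (PATTERN (insn))->insn (i), so the cast to the element type lives in one audited accessor instead of at every use site. Below is a rough standalone analogy of that pattern; the insn_sequence class and its element ()/insn () members are invented for illustration and assume nothing about GCC's real rtx_sequence beyond what the hunks show.

#include <assert.h>
#include <vector>

struct node { virtual ~node () {} };
struct insn_node : node
{
  int uid;
  insn_node (int u) : uid (u) {}
};

/* A sequence known to hold only insns, so lookups come back typed.  */
struct insn_sequence : node
{
  std::vector<node *> elems;

  int len () const { return (int) elems.size (); }

  /* Untyped access: every caller would need its own cast (XVECEXP style).  */
  node *element (int i) const { return elems[i]; }

  /* Typed access: the checked cast happens in exactly one place.  */
  insn_node *insn (int i) const
  {
    insn_node *res = dynamic_cast <insn_node *> (elems[i]);
    assert (res);
    return res;
  }
};

int
main ()
{
  insn_node a (1), b (2);
  insn_sequence seq;
  seq.elems.push_back (&a);
  seq.elems.push_back (&b);
  /* Callers get an insn_node * directly, e.g. seq.insn (0)->uid.  */
  assert (seq.insn (0)->uid == 1 && seq.insn (1)->uid == 2);
  return 0;
}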

@@ -44,7 +44,7 @@ enum mark_resource_type
MARK_SRC_DEST_CALL = 1
};
extern void mark_target_live_regs (rtx, rtx, struct resources *);
extern void mark_target_live_regs (rtx_insn *, rtx_insn *, struct resources *);
extern void mark_set_resources (rtx, struct resources *, int,
enum mark_resource_type);
extern void mark_referenced_resources (rtx, struct resources *, bool);


@@ -2789,8 +2789,8 @@ extern bool can_throw_external (const_rtx);
extern bool insn_could_throw_p (const_rtx);
extern bool insn_nothrow_p (const_rtx);
extern bool can_nonlocal_goto (const_rtx);
extern void copy_reg_eh_region_note_forward (rtx, rtx, rtx);
extern void copy_reg_eh_region_note_backward (rtx, rtx, rtx);
extern void copy_reg_eh_region_note_forward (rtx, rtx_insn *, rtx);
extern void copy_reg_eh_region_note_backward (rtx, rtx_insn *, rtx);
extern int inequality_comparisons_p (const_rtx);
extern rtx replace_rtx (rtx, rtx, rtx);
extern void replace_label (rtx *, rtx, rtx, bool);
@@ -3280,7 +3280,7 @@ extern void pop_topmost_sequence (void);
extern void set_new_first_and_last_insn (rtx_insn *, rtx_insn *);
extern unsigned int unshare_all_rtl (void);
extern void unshare_all_rtl_again (rtx_insn *);
extern void unshare_all_rtl_in_chain (rtx);
extern void unshare_all_rtl_in_chain (rtx_insn *);
extern void verify_rtl_sharing (void);
extern void add_insn (rtx_insn *);
extern void add_insn_before (rtx, rtx, basic_block);
@@ -3339,7 +3339,8 @@ extern void print_inline_rtx (FILE *, const_rtx, int);
by the scheduler anymore but for all "slim" RTL dumping. */
extern void dump_value_slim (FILE *, const_rtx, int);
extern void dump_insn_slim (FILE *, const_rtx);
extern void dump_rtl_slim (FILE *, const_rtx, const_rtx, int, int);
extern void dump_rtl_slim (FILE *, const rtx_insn *, const rtx_insn *,
int, int);
extern void print_value (pretty_printer *, const_rtx, int);
extern void print_pattern (pretty_printer *, const_rtx, int);
extern void print_insn (pretty_printer *, const_rtx, int);


@@ -3848,10 +3848,10 @@ delete_dep_nodes_in_back_deps (rtx insn, bool resolved_p)
/* Delete (RESOLVED_P) dependencies between HEAD and TAIL together with
deps_lists. */
void
sched_free_deps (rtx head, rtx tail, bool resolved_p)
sched_free_deps (rtx_insn *head, rtx_insn *tail, bool resolved_p)
{
rtx insn;
rtx next_tail = NEXT_INSN (tail);
rtx_insn *insn;
rtx_insn *next_tail = NEXT_INSN (tail);
/* We make two passes since some insns may be scheduled before their
dependencies are resolved. */


@@ -116,8 +116,8 @@ static void
init_ready_list (void)
{
int n = 0;
rtx prev_head = current_sched_info->prev_head;
rtx next_tail = current_sched_info->next_tail;
rtx_insn *prev_head = current_sched_info->prev_head;
rtx_insn *next_tail = current_sched_info->next_tail;
rtx_insn *insn;
sched_rgn_n_insns = 0;
@@ -189,7 +189,7 @@ begin_move_insn (rtx_insn *insn, rtx_insn *last)
else
{
/* Create an empty unreachable block after the INSN. */
rtx next = NEXT_INSN (insn);
rtx_insn *next = NEXT_INSN (insn);
if (next && BARRIER_P (next))
next = NEXT_INSN (next);
bb = create_basic_block (next, NULL_RTX, last_bb);


@@ -1328,7 +1328,7 @@ extern void deps_start_bb (struct deps_desc *, rtx);
extern enum reg_note ds_to_dt (ds_t);
extern bool deps_pools_are_empty_p (void);
extern void sched_free_deps (rtx, rtx, bool);
extern void sched_free_deps (rtx_insn *, rtx_insn *, bool);
extern void extend_dependency_caches (int, bool);
extern void debug_ds (ds_t);
@@ -1342,14 +1342,14 @@ extern void free_global_sched_pressure_data (void);
extern int haifa_classify_insn (const_rtx);
extern void get_ebb_head_tail (basic_block, basic_block,
rtx_insn **, rtx_insn **);
extern int no_real_insns_p (const_rtx, const_rtx);
extern int no_real_insns_p (const rtx_insn *, const rtx_insn *);
extern int insn_cost (rtx_insn *);
extern int dep_cost_1 (dep_t, dw_t);
extern int dep_cost (dep_t);
extern int set_priorities (rtx_insn *, rtx_insn *);
extern void sched_setup_bb_reg_pressure_info (basic_block, rtx);
extern void sched_setup_bb_reg_pressure_info (basic_block, rtx_insn *);
extern bool schedule_block (basic_block *, state_t);
extern int cycle_issued_insns;


@@ -2107,8 +2107,8 @@ schedule_more_p (void)
static void
init_ready_list (void)
{
rtx prev_head = current_sched_info->prev_head;
rtx next_tail = current_sched_info->next_tail;
rtx_insn *prev_head = current_sched_info->prev_head;
rtx_insn *next_tail = current_sched_info->next_tail;
int bb_src;
rtx_insn *insn;


@@ -816,14 +816,14 @@ dump_insn_slim (FILE *f, const_rtx x)
If COUNT < 0 it will stop only at LAST or NULL rtx. */
void
dump_rtl_slim (FILE *f, const_rtx first, const_rtx last,
dump_rtl_slim (FILE *f, const rtx_insn *first, const rtx_insn *last,
int count, int flags ATTRIBUTE_UNUSED)
{
const_rtx insn, tail;
const rtx_insn *insn, *tail;
pretty_printer rtl_slim_pp;
rtl_slim_pp.buffer->stream = f;
tail = last ? NEXT_INSN (last) : NULL_RTX;
tail = last ? NEXT_INSN (last) : NULL;
for (insn = first;
(insn != NULL) && (insn != tail) && (count != 0);
insn = NEXT_INSN (insn))
@@ -842,7 +842,7 @@ dump_rtl_slim (FILE *f, const_rtx first, const_rtx last,
void
rtl_dump_bb_for_graph (pretty_printer *pp, basic_block bb)
{
rtx insn;
rtx_insn *insn;
bool first = true;
/* TODO: inter-bb stuff. */
@@ -882,9 +882,11 @@ debug_insn_slim (const_rtx x)
}
/* Same as above, but using dump_rtl_slim. */
extern void debug_rtl_slim (FILE *, const_rtx, const_rtx, int, int);
extern void debug_rtl_slim (FILE *, const rtx_insn *, const rtx_insn *,
int, int);
DEBUG_FUNCTION void
debug_rtl_slim (const_rtx first, const_rtx last, int count, int flags)
debug_rtl_slim (const rtx_insn *first, const rtx_insn *last, int count,
int flags)
{
dump_rtl_slim (stderr, first, last, count, flags);
}

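In the dump_rtl_slim/debug_rtl_slim hunks above, narrowing the first/last parameters from const_rtx to const rtx_insn * is what lets the walk apply NEXT_INSN without casts, and is why call sites elsewhere in the patch now pass NULL rather than NULL_RTX. A small standalone sketch of the same shape follows; insn_node and dump_slim are made-up names, not the GCC types or functions.

#include <stdio.h>

struct insn_node
{
  int uid;
  insn_node *next;
};

/* Both endpoints are typed, so the chain walk needs no casts; a null LAST
   means "dump to the end of the chain", mirroring the NULL argument now used
   at the dump_rtl_slim call sites above.  */
static void
dump_slim (FILE *f, const insn_node *first, const insn_node *last)
{
  const insn_node *tail = last ? last->next : nullptr;
  for (const insn_node *insn = first; insn && insn != tail; insn = insn->next)
    fprintf (f, "insn %d\n", insn->uid);
}

int
main ()
{
  insn_node b = { 2, nullptr };
  insn_node a = { 1, &b };
  dump_slim (stdout, &a, nullptr);
  return 0;
}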

@@ -428,13 +428,13 @@ dup_block_and_redirect (basic_block bb, basic_block copy_bb, rtx_insn *before,
void
try_shrink_wrapping (edge *entry_edge, edge orig_entry_edge,
bitmap_head *bb_flags, rtx prologue_seq)
bitmap_head *bb_flags, rtx_insn *prologue_seq)
{
edge e;
edge_iterator ei;
bool nonempty_prologue = false;
unsigned max_grow_size;
rtx seq;
rtx_insn *seq;
for (seq = prologue_seq; seq; seq = NEXT_INSN (seq))
if (!NOTE_P (seq) || NOTE_KIND (seq) != NOTE_INSN_PROLOGUE_END)
@@ -449,7 +449,7 @@ try_shrink_wrapping (edge *entry_edge, edge orig_entry_edge,
{
HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
struct hard_reg_set_container set_up_by_prologue;
rtx p_insn;
rtx_insn *p_insn;
vec<basic_block> vec;
basic_block bb;
bitmap_head bb_antic_flags;
@@ -831,7 +831,7 @@ get_unconverted_simple_return (edge exit_fallthru_edge, bitmap_head bb_flags,
void
convert_to_simple_return (edge entry_edge, edge orig_entry_edge,
bitmap_head bb_flags, rtx returnjump,
bitmap_head bb_flags, rtx_insn *returnjump,
vec<edge> unconverted_simple_returns)
{
edge e;


@@ -40,11 +40,12 @@ extern void dup_block_and_redirect (basic_block bb, basic_block copy_bb,
rtx_insn *before,
bitmap_head *need_prologue);
extern void try_shrink_wrapping (edge *entry_edge, edge orig_entry_edge,
bitmap_head *bb_flags, rtx prologue_seq);
bitmap_head *bb_flags, rtx_insn *prologue_seq);
extern edge get_unconverted_simple_return (edge, bitmap_head,
vec<edge> *, rtx_insn **);
extern void convert_to_simple_return (edge entry_edge, edge orig_entry_edge,
bitmap_head bb_flags, rtx returnjump,
bitmap_head bb_flags,
rtx_insn *returnjump,
vec<edge> unconverted_simple_returns);
#endif


@@ -177,7 +177,7 @@ propagate_for_debug_subst (rtx from, const_rtx old_rtx, void *data)
of THIS_BASIC_BLOCK. */
void
propagate_for_debug (rtx_insn *insn, rtx last, rtx dest, rtx src,
propagate_for_debug (rtx_insn *insn, rtx_insn *last, rtx dest, rtx src,
basic_block this_basic_block)
{
rtx_insn *next, *end = NEXT_INSN (BB_END (this_basic_block));


@@ -149,7 +149,7 @@ extern int dead_debug_insert_temp (struct dead_debug_local *,
unsigned int uregno, rtx insn,
enum debug_temp_where);
extern void propagate_for_debug (rtx_insn *, rtx, rtx, rtx, basic_block);
extern void propagate_for_debug (rtx_insn *, rtx_insn *, rtx, rtx, basic_block);
#endif /* GCC_VALTRACK_H */