df.h (DF_INSN_INFO_MWS, [...]): New macros.

gcc/
	* df.h (DF_INSN_INFO_MWS, FOR_EACH_INSN_INFO_DEF): New macros.
	(FOR_EACH_INSN_INFO_USE, FOR_EACH_INSN_INFO_EQ_USE): Likewise.
	(FOR_EACH_INSN_DEF, FOR_EACH_INSN_USE, FOR_EACH_INSN_EQ_USE): Likewise.
	* auto-inc-dec.c (find_inc, merge_in_block): Use them.
	* combine.c (create_log_links): Likewise.
	* compare-elim.c (find_flags_uses_in_insn): Likewise.
	(try_eliminate_compare): Likewise.
	* cprop.c (make_set_regs_unavailable, mark_oprs_set): Likewise.
	* dce.c (deletable_insn_p, find_call_stack_args): Likewise.
	(remove_reg_equal_equiv_notes_for_defs): Likewise.
	(reset_unmarked_insns_debug_uses, mark_reg_dependencies): Likewise.
	(word_dce_process_block, dce_process_block): Likewise.
	* ddg.c (def_has_ccmode_p): Likewise.
	* df-core.c (df_bb_regno_first_def_find): Likewise.
	(df_bb_regno_last_def_find, df_find_def, df_find_use): Likewise.
	* df-problems.c (df_rd_simulate_one_insn): Likewise.
	(df_lr_bb_local_compute, df_live_bb_local_compute): Likewise.
	(df_chain_remove_problem, df_chain_insn_top_dump): Likewise.
	(df_chain_insn_bottom_dump, df_word_lr_bb_local_compute): Likewise.
	(df_word_lr_simulate_defs, df_word_lr_simulate_uses): Likewise.
	(df_remove_dead_eq_notes, df_note_bb_compute): Likewise.
	(df_simulate_find_defs, df_simulate_find_uses): Likewise.
	(df_simulate_find_noclobber_defs, df_simulate_defs): Likewise.
	(df_simulate_uses, df_md_simulate_one_insn): Likewise.
	* df-scan.c (df_reorganize_refs_by_reg_by_insn): Likewise.
	* fwprop.c (local_ref_killed_between_p): Likewise.
	(all_uses_available_at, free_load_extend): Likewise.
	* gcse.c (update_bb_reg_pressure, calculate_bb_reg_pressure): Likewise.
	* hw-doloop.c (scan_loop): Likewise.
	* ifcvt.c (dead_or_predicable): Likewise.
	* init-regs.c (initialize_uninitialized_regs): Likewise.
	* ira-lives.c (mark_hard_reg_early_clobbers): Likewise.
	(process_bb_node_lives): Likewise.
	* ira.c (compute_regs_asm_clobbered, build_insn_chain): Likewise.
	(find_moveable_pseudos): Likewise.
	* loop-invariant.c (check_dependencies, record_uses): Likewise.
	* recog.c (peep2_find_free_register): Likewise.
	* ree.c (get_defs): Likewise.
	* regstat.c (regstat_bb_compute_ri): Likewise.
	(regstat_bb_compute_calls_crossed): Likewise.
	* sched-deps.c (find_inc, find_mem): Likewise.
	* sel-sched-ir.c (maybe_downgrade_id_to_use): Likewise.
	(maybe_downgrade_id_to_use, setup_id_reg_sets): Likewise.
	* shrink-wrap.c (requires_stack_frame_p): Likewise.
	(prepare_shrink_wrap): Likewise.
	* store-motion.c (compute_store_table, build_store_vectors): Likewise.
	* web.c (union_defs, pass_web::execute): Likewise.
	* config/i386/i386.c (increase_distance, insn_defines_reg): Likewise.
	(insn_uses_reg_mem, ix86_ok_to_clobber_flags): Likewise.

From-SVN: r211678
Richard Sandiford, 2014-06-15 07:32:28 +00:00 (committed by Richard Sandiford)
parent 2c73339ae9
commit bfac633a90
28 changed files with 539 additions and 665 deletions
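
The conversion itself is mechanical. As a representative before/after sketch (a composite of the hunks below rather than a verbatim quotation of any one of them; "insn" stands for an rtx insn with up-to-date df-scan information and "defs" for some result bitmap):

    /* Old style: walk the null-terminated df_ref vector by hand.  */
    df_ref *def_rec;
    for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
      {
        df_ref def = *def_rec;
        bitmap_set_bit (defs, DF_REF_REGNO (def));
      }

    /* New style: the FOR_EACH_* macro hides the vector cursor.  */
    df_ref def;
    FOR_EACH_INSN_DEF (def, insn)
      bitmap_set_bit (defs, DF_REF_REGNO (def));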

gcc/ChangeLog

@ -1,3 +1,55 @@
2014-06-15 Richard Sandiford <rdsandiford@googlemail.com>
* df.h (DF_INSN_INFO_MWS, FOR_EACH_INSN_INFO_DEF): New macros.
(FOR_EACH_INSN_INFO_USE, FOR_EACH_INSN_INFO_EQ_USE): Likewise.
(FOR_EACH_INSN_DEF, FOR_EACH_INSN_USE, FOR_EACH_INSN_EQ_USE): Likewise.
* auto-inc-dec.c (find_inc, merge_in_block): Use them.
* combine.c (create_log_links): Likewise.
* compare-elim.c (find_flags_uses_in_insn): Likewise.
(try_eliminate_compare): Likewise.
* cprop.c (make_set_regs_unavailable, mark_oprs_set): Likewise.
* dce.c (deletable_insn_p, find_call_stack_args): Likewise.
(remove_reg_equal_equiv_notes_for_defs): Likewise.
(reset_unmarked_insns_debug_uses, mark_reg_dependencies): Likewise.
(word_dce_process_block, dce_process_block): Likewise.
* ddg.c (def_has_ccmode_p): Likewise.
* df-core.c (df_bb_regno_first_def_find): Likewise.
(df_bb_regno_last_def_find, df_find_def, df_find_use): Likewise.
* df-problems.c (df_rd_simulate_one_insn): Likewise.
(df_lr_bb_local_compute, df_live_bb_local_compute): Likewise.
(df_chain_remove_problem, df_chain_insn_top_dump): Likewise.
(df_chain_insn_bottom_dump, df_word_lr_bb_local_compute): Likewise.
(df_word_lr_simulate_defs, df_word_lr_simulate_uses): Likewise.
(df_remove_dead_eq_notes, df_note_bb_compute): Likewise.
(df_simulate_find_defs, df_simulate_find_uses): Likewise.
(df_simulate_find_noclobber_defs, df_simulate_defs): Likewise.
(df_simulate_uses, df_md_simulate_one_insn): Likewise.
* df-scan.c (df_reorganize_refs_by_reg_by_insn): Likewise.
* fwprop.c (local_ref_killed_between_p): Likewise.
(all_uses_available_at, free_load_extend): Likewise.
* gcse.c (update_bb_reg_pressure, calculate_bb_reg_pressure): Likewise.
* hw-doloop.c (scan_loop): Likewise.
* ifcvt.c (dead_or_predicable): Likewise.
* init-regs.c (initialize_uninitialized_regs): Likewise.
* ira-lives.c (mark_hard_reg_early_clobbers): Likewise.
(process_bb_node_lives): Likewise.
* ira.c (compute_regs_asm_clobbered, build_insn_chain): Likewise.
(find_moveable_pseudos): Likewise.
* loop-invariant.c (check_dependencies, record_uses): Likewise.
* recog.c (peep2_find_free_register): Likewise.
* ree.c (get_defs): Likewise.
* regstat.c (regstat_bb_compute_ri): Likewise.
(regstat_bb_compute_calls_crossed): Likewise.
* sched-deps.c (find_inc, find_mem): Likewise.
* sel-sched-ir.c (maybe_downgrade_id_to_use): Likewise.
(maybe_downgrade_id_to_use, setup_id_reg_sets): Likewise.
* shrink-wrap.c (requires_stack_frame_p): Likewise.
(prepare_shrink_wrap): Likewise.
* store-motion.c (compute_store_table, build_store_vectors): Likewise.
* web.c (union_defs, pass_web::execute): Likewise.
* config/i386/i386.c (increase_distance, insn_defines_reg): Likewise.
(insn_uses_reg_mem, ix86_ok_to_clobber_flags): Likewise.
2014-06-13 Vladimir Makarov <vmakarov@redhat.com>
* lra-assign.c (assign_by_spills): Add code to assign vector regs

gcc/auto-inc-dec.c

@ -969,7 +969,7 @@ find_inc (bool first_try)
rtx insn;
basic_block bb = BLOCK_FOR_INSN (mem_insn.insn);
rtx other_insn;
df_ref *def_rec;
df_ref def;
/* Make sure this reg appears only once in this insn. */
if (count_occurrences (PATTERN (mem_insn.insn), mem_insn.reg0, 1) != 1)
@ -1013,9 +1013,8 @@ find_inc (bool first_try)
/* Need to assure that none of the operands of the inc instruction are
assigned to by the mem insn. */
for (def_rec = DF_INSN_DEFS (mem_insn.insn); *def_rec; def_rec++)
FOR_EACH_INSN_DEF (def, mem_insn.insn)
{
df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
if ((regno == REGNO (inc_insn.reg0))
|| (regno == REGNO (inc_insn.reg_res)))
@ -1342,7 +1341,7 @@ merge_in_block (int max_reg, basic_block bb)
FOR_BB_INSNS_REVERSE_SAFE (bb, insn, curr)
{
unsigned int uid = INSN_UID (insn);
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
bool insn_is_add_or_inc = true;
if (!NONDEBUG_INSN_P (insn))
@ -1418,22 +1417,20 @@ merge_in_block (int max_reg, basic_block bb)
/* If the inc insn was merged with a mem, the inc insn is gone
and there is noting to update. */
if (DF_INSN_UID_GET (uid))
if (insn_info)
{
df_ref *def_rec;
df_ref *use_rec;
df_ref def, use;
/* Need to update next use. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
df_ref def = *def_rec;
reg_next_use[DF_REF_REGNO (def)] = NULL;
reg_next_inc_use[DF_REF_REGNO (def)] = NULL;
reg_next_def[DF_REF_REGNO (def)] = insn;
}
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
df_ref use = *use_rec;
reg_next_use[DF_REF_REGNO (use)] = insn;
if (insn_is_add_or_inc)
reg_next_inc_use[DF_REF_REGNO (use)] = insn;

gcc/combine.c

@ -985,7 +985,7 @@ create_log_links (void)
{
basic_block bb;
rtx *next_use, insn;
df_ref *def_vec, *use_vec;
df_ref def, use;
next_use = XCNEWVEC (rtx, max_reg_num ());
@ -1008,9 +1008,8 @@ create_log_links (void)
/* Log links are created only once. */
gcc_assert (!LOG_LINKS (insn));
for (def_vec = DF_INSN_DEFS (insn); *def_vec; def_vec++)
FOR_EACH_INSN_DEF (def, insn)
{
df_ref def = *def_vec;
int regno = DF_REF_REGNO (def);
rtx use_insn;
@ -1061,9 +1060,8 @@ create_log_links (void)
next_use[regno] = NULL_RTX;
}
for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
FOR_EACH_INSN_USE (use, insn)
{
df_ref use = *use_vec;
int regno = DF_REF_REGNO (use);
/* Do not consider the usage of the stack pointer

gcc/compare-elim.c

@ -193,14 +193,14 @@ arithmetic_flags_clobber_p (rtx insn)
static void
find_flags_uses_in_insn (struct comparison *cmp, rtx insn)
{
df_ref *use_rec, use;
df_ref use;
/* If we've already lost track of uses, don't bother collecting more. */
if (cmp->missing_uses)
return;
/* Find a USE of the flags register. */
for (use_rec = DF_INSN_USES (insn); (use = *use_rec) != NULL; use_rec++)
FOR_EACH_INSN_USE (use, insn)
if (DF_REF_REGNO (use) == targetm.flags_regnum)
{
rtx x, *loc;
@ -522,7 +522,7 @@ try_eliminate_compare (struct comparison *cmp)
| DF_REF_MUST_CLOBBER | DF_REF_SIGN_EXTRACT
| DF_REF_ZERO_EXTRACT | DF_REF_STRICT_LOW_PART
| DF_REF_PRE_POST_MODIFY);
df_ref *def_rec, def;
df_ref def;
/* Note that the BB_HEAD is always either a note or a label, but in
any case it means that IN_A is defined outside the block. */
@ -532,7 +532,7 @@ try_eliminate_compare (struct comparison *cmp)
continue;
/* Find a possible def of IN_A in INSN. */
for (def_rec = DF_INSN_DEFS (insn); (def = *def_rec) != NULL; def_rec++)
FOR_EACH_INSN_DEF (def, insn)
if (DF_REF_REGNO (def) == REGNO (in_a))
break;

gcc/config/i386/i386.c

@ -17764,8 +17764,7 @@ ix86_emit_cfi ()
static unsigned int
increase_distance (rtx prev, rtx next, unsigned int distance)
{
df_ref *use_rec;
df_ref *def_rec;
df_ref def, use;
if (!prev || !next)
return distance + (distance & 1) + 2;
@ -17773,10 +17772,10 @@ increase_distance (rtx prev, rtx next, unsigned int distance)
if (!DF_INSN_USES (next) || !DF_INSN_DEFS (prev))
return distance + 1;
for (use_rec = DF_INSN_USES (next); *use_rec; use_rec++)
for (def_rec = DF_INSN_DEFS (prev); *def_rec; def_rec++)
if (!DF_REF_IS_ARTIFICIAL (*def_rec)
&& DF_REF_REGNO (*use_rec) == DF_REF_REGNO (*def_rec))
FOR_EACH_INSN_USE (use, next)
FOR_EACH_INSN_DEF (def, prev)
if (!DF_REF_IS_ARTIFICIAL (def)
&& DF_REF_REGNO (use) == DF_REF_REGNO (def))
return distance + (distance & 1) + 2;
return distance + 1;
@ -17789,16 +17788,14 @@ static bool
insn_defines_reg (unsigned int regno1, unsigned int regno2,
rtx insn)
{
df_ref *def_rec;
df_ref def;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
if (DF_REF_REG_DEF_P (*def_rec)
&& !DF_REF_IS_ARTIFICIAL (*def_rec)
&& (regno1 == DF_REF_REGNO (*def_rec)
|| regno2 == DF_REF_REGNO (*def_rec)))
{
return true;
}
FOR_EACH_INSN_DEF (def, insn)
if (DF_REF_REG_DEF_P (def)
&& !DF_REF_IS_ARTIFICIAL (def)
&& (regno1 == DF_REF_REGNO (def)
|| regno2 == DF_REF_REGNO (def)))
return true;
return false;
}
@ -17809,10 +17806,10 @@ insn_defines_reg (unsigned int regno1, unsigned int regno2,
static bool
insn_uses_reg_mem (unsigned int regno, rtx insn)
{
df_ref *use_rec;
df_ref use;
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
if (DF_REF_REG_MEM_P (*use_rec) && regno == DF_REF_REGNO (*use_rec))
FOR_EACH_INSN_USE (use, insn)
if (DF_REF_REG_MEM_P (use) && regno == DF_REF_REGNO (use))
return true;
return false;
@ -18144,15 +18141,15 @@ static bool
ix86_ok_to_clobber_flags (rtx insn)
{
basic_block bb = BLOCK_FOR_INSN (insn);
df_ref *use;
df_ref use;
bitmap live;
while (insn)
{
if (NONDEBUG_INSN_P (insn))
{
for (use = DF_INSN_USES (insn); *use; use++)
if (DF_REF_REG_USE_P (*use) && DF_REF_REGNO (*use) == FLAGS_REG)
FOR_EACH_INSN_USE (use, insn)
if (DF_REF_REG_USE_P (use) && DF_REF_REGNO (use) == FLAGS_REG)
return false;
if (insn_defines_reg (FLAGS_REG, INVALID_REGNUM, insn))
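
Note that the two iterator macros nest safely in increase_distance above: each FOR_EACH_* declares its own hidden cursor, named after the iterator variable, so distinct iterator names never collide. Roughly, the nested loops expand to the following (a sketch based on the df.h definitions added later in this patch):

    for (df_ref *use_ = DF_INSN_USES (next); (use = *use_); ++use_)
      for (df_ref *def_ = DF_INSN_DEFS (prev); (def = *def_); ++def_)
        if (!DF_REF_IS_ARTIFICIAL (def)
            && DF_REF_REGNO (use) == DF_REF_REGNO (def))
          return distance + (distance & 1) + 2;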

gcc/cprop.c

@ -374,11 +374,10 @@ dump_hash_table (FILE *file, const char *name, struct hash_table_d *table)
static void
make_set_regs_unavailable (rtx insn)
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref *def_rec;
df_ref def;
for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (*def_rec));
FOR_EACH_INSN_DEF (def, insn)
SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (def));
}
/* Top level function to create an assignment hash table.
@ -533,11 +532,10 @@ reg_not_set_p (const_rtx x, const_rtx insn ATTRIBUTE_UNUSED)
static void
mark_oprs_set (rtx insn)
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref *def_rec;
df_ref def;
for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (*def_rec));
FOR_EACH_INSN_DEF (def, insn)
SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (def));
}
/* Compute copy/constant propagation working variables. */

gcc/dce.c

@ -96,6 +96,7 @@ deletable_insn_p (rtx insn, bool fast, bitmap arg_stores)
{
rtx body, x;
int i;
df_ref def;
if (CALL_P (insn)
/* We cannot delete calls inside of the recursive dce because
@ -121,9 +122,9 @@ deletable_insn_p (rtx insn, bool fast, bitmap arg_stores)
return false;
/* If INSN sets a global_reg, leave it untouched. */
for (df_ref *def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
if (HARD_REGISTER_NUM_P (DF_REF_REGNO (*def_rec))
&& global_regs[DF_REF_REGNO (*def_rec)])
FOR_EACH_INSN_DEF (def, insn)
if (HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
&& global_regs[DF_REF_REGNO (def)])
return false;
body = PATTERN (insn);
@ -305,18 +306,18 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
sp + offset. */
if (!fast)
{
df_ref *use_rec;
df_ref use;
struct df_link *defs;
rtx set;
for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
FOR_EACH_INSN_USE (use, call_insn)
if (rtx_equal_p (addr, DF_REF_REG (use)))
break;
if (*use_rec == NULL)
if (use == NULL)
return false;
for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
if (! DF_REF_IS_ARTIFICIAL (defs->ref))
break;
@ -364,15 +365,15 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
}
if (addr != stack_pointer_rtx)
{
df_ref *use_rec;
df_ref use;
struct df_link *defs;
rtx set;
for (use_rec = DF_INSN_USES (call_insn); *use_rec; use_rec++)
if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
FOR_EACH_INSN_USE (use, call_insn)
if (rtx_equal_p (addr, DF_REF_REG (use)))
break;
for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
if (! DF_REF_IS_ARTIFICIAL (defs->ref))
break;
@ -429,18 +430,18 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
break;
if (!fast)
{
df_ref *use_rec;
df_ref use;
struct df_link *defs;
rtx set;
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
if (rtx_equal_p (addr, DF_REF_REG (*use_rec)))
FOR_EACH_INSN_USE (use, insn)
if (rtx_equal_p (addr, DF_REF_REG (use)))
break;
if (*use_rec == NULL)
if (use == NULL)
break;
for (defs = DF_REF_CHAIN (*use_rec); defs; defs = defs->next)
for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
if (! DF_REF_IS_ARTIFICIAL (defs->ref))
break;
@ -496,10 +497,10 @@ find_call_stack_args (rtx call_insn, bool do_mark, bool fast,
static void
remove_reg_equal_equiv_notes_for_defs (rtx insn)
{
df_ref *def_rec;
df_ref def;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (*def_rec));
FOR_EACH_INSN_DEF (def, insn)
remove_reg_equal_equiv_notes_for_regno (DF_REF_REGNO (def));
}
/* Scan all BBs for debug insns and reset those that reference values
@ -515,11 +516,10 @@ reset_unmarked_insns_debug_uses (void)
FOR_BB_INSNS_REVERSE_SAFE (bb, insn, next)
if (DEBUG_INSN_P (insn))
{
df_ref *use_rec;
df_ref use;
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
FOR_EACH_INSN_USE (use, insn)
{
df_ref use = *use_rec;
struct df_link *defs;
for (defs = DF_REF_CHAIN (use); defs; defs = defs->next)
{
@ -680,14 +680,13 @@ static void
mark_reg_dependencies (rtx insn)
{
struct df_link *defs;
df_ref *use_rec;
df_ref use;
if (DEBUG_INSN_P (insn))
return;
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
FOR_EACH_INSN_USE (use, insn)
{
df_ref use = *use_rec;
if (dump_file)
{
fprintf (dump_file, "Processing use of ");
@ -868,14 +867,14 @@ word_dce_process_block (basic_block bb, bool redo_out,
FOR_BB_INSNS_REVERSE (bb, insn)
if (DEBUG_INSN_P (insn))
{
df_ref *use_rec;
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
if (DF_REF_REGNO (*use_rec) >= FIRST_PSEUDO_REGISTER
&& (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (*use_rec)))
df_ref use;
FOR_EACH_INSN_USE (use, insn)
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER
&& (GET_MODE_SIZE (GET_MODE (DF_REF_REAL_REG (use)))
== 2 * UNITS_PER_WORD)
&& !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec))
&& !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (*use_rec) + 1))
dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
&& !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use))
&& !bitmap_bit_p (local_live, 2 * DF_REF_REGNO (use) + 1))
dead_debug_add (&debug, use, DF_REF_REGNO (use));
}
else if (INSN_P (insn))
{
@ -898,10 +897,10 @@ word_dce_process_block (basic_block bb, bool redo_out,
death. */
if (debug.used && !bitmap_empty_p (debug.used))
{
df_ref *def_rec;
df_ref def;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
dead_debug_insert_temp (&debug, DF_REF_REGNO (*def_rec), insn,
FOR_EACH_INSN_DEF (def, insn)
dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
marked_insn_p (insn)
&& !control_flow_insn_p (insn)
? DEBUG_TEMP_AFTER_WITH_REG_FORCE
@ -939,7 +938,7 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
bitmap local_live = BITMAP_ALLOC (&dce_tmp_bitmap_obstack);
rtx insn;
bool block_changed;
df_ref *def_rec;
df_ref def;
struct dead_debug_local debug;
if (redo_out)
@ -969,11 +968,11 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
FOR_BB_INSNS_REVERSE (bb, insn)
if (DEBUG_INSN_P (insn))
{
df_ref *use_rec;
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
if (!bitmap_bit_p (local_live, DF_REF_REGNO (*use_rec))
&& !bitmap_bit_p (au, DF_REF_REGNO (*use_rec)))
dead_debug_add (&debug, *use_rec, DF_REF_REGNO (*use_rec));
df_ref use;
FOR_EACH_INSN_USE (use, insn)
if (!bitmap_bit_p (local_live, DF_REF_REGNO (use))
&& !bitmap_bit_p (au, DF_REF_REGNO (use)))
dead_debug_add (&debug, use, DF_REF_REGNO (use));
}
else if (INSN_P (insn))
{
@ -981,9 +980,9 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
/* The insn is needed if there is someone who uses the output. */
if (!needed)
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
if (bitmap_bit_p (local_live, DF_REF_REGNO (*def_rec))
|| bitmap_bit_p (au, DF_REF_REGNO (*def_rec)))
FOR_EACH_INSN_DEF (def, insn)
if (bitmap_bit_p (local_live, DF_REF_REGNO (def))
|| bitmap_bit_p (au, DF_REF_REGNO (def)))
{
needed = true;
mark_insn (insn, true);
@ -1004,8 +1003,8 @@ dce_process_block (basic_block bb, bool redo_out, bitmap au,
was marked, in case the debug use was after the point of
death. */
if (debug.used && !bitmap_empty_p (debug.used))
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
dead_debug_insert_temp (&debug, DF_REF_REGNO (*def_rec), insn,
FOR_EACH_INSN_DEF (def, insn)
dead_debug_insert_temp (&debug, DF_REF_REGNO (def), insn,
needed && !control_flow_insn_p (insn)
? DEBUG_TEMP_AFTER_WITH_REG_FORCE
: DEBUG_TEMP_BEFORE_WITH_VALUE);

gcc/ddg.c

@ -169,11 +169,11 @@ autoinc_var_is_used_p (rtx def_insn, rtx use_insn)
static bool
def_has_ccmode_p (rtx insn)
{
df_ref *def;
df_ref def;
for (def = DF_INSN_DEFS (insn); *def; def++)
FOR_EACH_INSN_DEF (def, insn)
{
enum machine_mode mode = GET_MODE (DF_REF_REG (*def));
enum machine_mode mode = GET_MODE (DF_REF_REG (def));
if (GET_MODE_CLASS (mode) == MODE_CC)
return true;

gcc/df-core.c

@ -1947,21 +1947,16 @@ df_ref
df_bb_regno_first_def_find (basic_block bb, unsigned int regno)
{
rtx insn;
df_ref *def_rec;
unsigned int uid;
df_ref def;
FOR_BB_INSNS (bb, insn)
{
if (!INSN_P (insn))
continue;
uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
if (DF_REF_REGNO (def) == regno)
return def;
}
FOR_EACH_INSN_DEF (def, insn)
if (DF_REF_REGNO (def) == regno)
return def;
}
return NULL;
}
@ -1973,21 +1968,16 @@ df_ref
df_bb_regno_last_def_find (basic_block bb, unsigned int regno)
{
rtx insn;
df_ref *def_rec;
unsigned int uid;
df_ref def;
FOR_BB_INSNS_REVERSE (bb, insn)
{
if (!INSN_P (insn))
continue;
uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
if (DF_REF_REGNO (def) == regno)
return def;
}
FOR_EACH_INSN_DEF (def, insn)
if (DF_REF_REGNO (def) == regno)
return def;
}
return NULL;
@ -1999,20 +1989,15 @@ df_bb_regno_last_def_find (basic_block bb, unsigned int regno)
df_ref
df_find_def (rtx insn, rtx reg)
{
unsigned int uid;
df_ref *def_rec;
df_ref def;
if (GET_CODE (reg) == SUBREG)
reg = SUBREG_REG (reg);
gcc_assert (REG_P (reg));
uid = INSN_UID (insn);
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
if (DF_REF_REGNO (def) == REGNO (reg))
return def;
}
FOR_EACH_INSN_DEF (def, insn)
if (DF_REF_REGNO (def) == REGNO (reg))
return def;
return NULL;
}
@ -2033,27 +2018,20 @@ df_reg_defined (rtx insn, rtx reg)
df_ref
df_find_use (rtx insn, rtx reg)
{
unsigned int uid;
df_ref *use_rec;
df_ref use;
if (GET_CODE (reg) == SUBREG)
reg = SUBREG_REG (reg);
gcc_assert (REG_P (reg));
uid = INSN_UID (insn);
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
FOR_EACH_INSN_INFO_USE (use, insn_info)
if (DF_REF_REGNO (use) == REGNO (reg))
return use;
if (df->changeable_flags & DF_EQ_NOTES)
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
if (DF_REF_REGNO (use) == REGNO (reg))
return use;
}
if (df->changeable_flags & DF_EQ_NOTES)
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if (DF_REF_REGNO (use) == REGNO (reg))
return use;
}
return NULL;
}

gcc/df-problems.c

@ -268,12 +268,10 @@ void
df_rd_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
bitmap local_rd)
{
unsigned uid = INSN_UID (insn);
df_ref *def_rec;
df_ref def;
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
FOR_EACH_INSN_DEF (def, insn)
{
df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (dregno >= FIRST_PSEUDO_REGISTER))
@ -838,6 +836,7 @@ df_lr_bb_local_compute (unsigned int bb_index)
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
df_ref def, use;
/* Process the registers set in an exception handler. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
@ -862,30 +861,23 @@ df_lr_bb_local_compute (unsigned int bb_index)
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
if (!NONDEBUG_INSN_P (insn))
continue;
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
{
unsigned int dregno = DF_REF_REGNO (def);
bitmap_set_bit (&bb_info->def, dregno);
bitmap_clear_bit (&bb_info->use, dregno);
}
}
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
FOR_EACH_INSN_INFO_DEF (def, insn_info)
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
{
unsigned int dregno = DF_REF_REGNO (def);
bitmap_set_bit (&bb_info->def, dregno);
bitmap_clear_bit (&bb_info->use, dregno);
}
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
}
FOR_EACH_INSN_INFO_USE (use, insn_info)
/* Add use to set of uses in this BB. */
bitmap_set_bit (&bb_info->use, DF_REF_REGNO (use));
}
/* Process the registers set in an exception handler or the hard
@ -1463,7 +1455,7 @@ df_live_bb_local_compute (unsigned int bb_index)
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
rtx insn;
df_ref *def_rec;
df_ref def, *def_rec;
int luid = 0;
FOR_BB_INSNS (bb, insn)
@ -1484,9 +1476,8 @@ df_live_bb_local_compute (unsigned int bb_index)
continue;
luid++;
for (def_rec = DF_INSN_INFO_DEFS (insn_info); *def_rec; def_rec++)
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
if (DF_REF_FLAGS_IS_SET (def,
@ -1985,6 +1976,7 @@ df_chain_remove_problem (void)
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
df_ref def, use;
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
if (df_chain_problem_p (DF_DU_CHAIN))
@ -1995,23 +1987,20 @@ df_chain_remove_problem (void)
DF_REF_CHAIN (*use_rec) = NULL;
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
if (INSN_P (insn))
{
if (df_chain_problem_p (DF_DU_CHAIN))
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
DF_REF_CHAIN (*def_rec) = NULL;
if (df_chain_problem_p (DF_UD_CHAIN))
{
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
DF_REF_CHAIN (*use_rec) = NULL;
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
DF_REF_CHAIN (*use_rec) = NULL;
}
}
}
if (INSN_P (insn))
{
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
if (df_chain_problem_p (DF_DU_CHAIN))
FOR_EACH_INSN_INFO_DEF (def, insn_info)
DF_REF_CHAIN (def) = NULL;
if (df_chain_problem_p (DF_UD_CHAIN))
{
FOR_EACH_INSN_INFO_USE (use, insn_info)
DF_REF_CHAIN (use) = NULL;
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
DF_REF_CHAIN (use) = NULL;
}
}
}
bitmap_clear (df_chain->out_of_date_transfer_functions);
@ -2254,39 +2243,28 @@ df_chain_insn_top_dump (const_rtx insn, FILE *file)
if (df_chain_problem_p (DF_UD_CHAIN) && INSN_P (insn))
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref *use_rec = DF_INSN_INFO_USES (insn_info);
df_ref *eq_use_rec = DF_INSN_INFO_EQ_USES (insn_info);
df_ref use;
fprintf (file, ";; UD chains for insn luid %d uid %d\n",
DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
if (*use_rec || *eq_use_rec)
{
while (*use_rec)
{
df_ref use = *use_rec;
if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
|| !(df->changeable_flags & DF_NO_HARD_REGS))
{
fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
}
use_rec++;
}
while (*eq_use_rec)
{
df_ref use = *eq_use_rec;
if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
|| !(df->changeable_flags & DF_NO_HARD_REGS))
{
fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
}
eq_use_rec++;
}
}
FOR_EACH_INSN_INFO_USE (use, insn_info)
if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
|| !(df->changeable_flags & DF_NO_HARD_REGS))
{
fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
}
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (use))
|| !(df->changeable_flags & DF_NO_HARD_REGS))
{
fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
}
}
}
@ -2296,26 +2274,19 @@ df_chain_insn_bottom_dump (const_rtx insn, FILE *file)
if (df_chain_problem_p (DF_DU_CHAIN) && INSN_P (insn))
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref *def_rec = DF_INSN_INFO_DEFS (insn_info);
df_ref def;
fprintf (file, ";; DU chains for insn luid %d uid %d\n",
DF_INSN_INFO_LUID (insn_info), INSN_UID (insn));
if (*def_rec)
{
while (*def_rec)
{
df_ref def = *def_rec;
if (! HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
|| !(df->changeable_flags & DF_NO_HARD_REGS))
{
fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (def), file);
fprintf (file, "\n");
}
def_rec++;
}
}
FOR_EACH_INSN_INFO_DEF (def, insn_info)
if (!HARD_REGISTER_NUM_P (DF_REF_REGNO (def))
|| !(df->changeable_flags & DF_NO_HARD_REGS))
{
fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
if (DF_REF_FLAGS (def) & DF_REF_READ_WRITE)
fprintf (file, "read/write ");
df_chain_dump (DF_REF_CHAIN (def), file);
fprintf (file, "\n");
}
fprintf (file, "\n");
}
}
@ -2534,6 +2505,7 @@ df_word_lr_bb_local_compute (unsigned int bb_index)
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
df_ref def, use;
/* Ensure that artificial refs don't contain references to pseudos. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
@ -2550,26 +2522,20 @@ df_word_lr_bb_local_compute (unsigned int bb_index)
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
if (!NONDEBUG_INSN_P (insn))
continue;
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
{
df_word_lr_mark_ref (def, true, &bb_info->def);
df_word_lr_mark_ref (def, false, &bb_info->use);
}
}
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
df_word_lr_mark_ref (use, true, &bb_info->use);
}
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
FOR_EACH_INSN_INFO_DEF (def, insn_info)
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
if (!(DF_REF_FLAGS (def) & (DF_REF_CONDITIONAL)))
{
df_word_lr_mark_ref (def, true, &bb_info->def);
df_word_lr_mark_ref (def, false, &bb_info->use);
}
FOR_EACH_INSN_INFO_USE (use, insn_info)
df_word_lr_mark_ref (use, true, &bb_info->use);
}
}
@ -2753,17 +2719,13 @@ bool
df_word_lr_simulate_defs (rtx insn, bitmap live)
{
bool changed = false;
df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
df_ref def;
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
changed = true;
else
changed |= df_word_lr_mark_ref (*def_rec, false, live);
}
FOR_EACH_INSN_DEF (def, insn)
if (DF_REF_FLAGS (def) & DF_REF_CONDITIONAL)
changed = true;
else
changed |= df_word_lr_mark_ref (def, false, live);
return changed;
}
@ -2773,11 +2735,10 @@ df_word_lr_simulate_defs (rtx insn, bitmap live)
void
df_word_lr_simulate_uses (rtx insn, bitmap live)
{
df_ref *use_rec;
unsigned int uid = INSN_UID (insn);
df_ref use;
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
df_word_lr_mark_ref (*use_rec, true, live);
FOR_EACH_INSN_USE (use, insn)
df_word_lr_mark_ref (use, true, live);
}
/*----------------------------------------------------------------------------
@ -2899,22 +2860,19 @@ df_remove_dead_eq_notes (rtx insn, bitmap live)
one REG_EQUAL/EQUIV note, all of EQ_USES will refer to this note
so we need to purge the complete EQ_USES vector when removing
the note using df_notes_rescan. */
df_ref *use_rec;
df_ref use;
bool deleted = false;
for (use_rec = DF_INSN_EQ_USES (insn); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
&& DF_REF_LOC (use)
&& (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
&& ! bitmap_bit_p (live, DF_REF_REGNO (use))
&& loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
{
deleted = true;
break;
}
}
FOR_EACH_INSN_EQ_USE (use, insn)
if (DF_REF_REGNO (use) > FIRST_PSEUDO_REGISTER
&& DF_REF_LOC (use)
&& (DF_REF_FLAGS (use) & DF_REF_IN_NOTE)
&& !bitmap_bit_p (live, DF_REF_REGNO (use))
&& loc_mentioned_in_p (DF_REF_LOC (use), XEXP (link, 0)))
{
deleted = true;
break;
}
if (deleted)
{
rtx next;
@ -3156,6 +3114,7 @@ df_note_bb_compute (unsigned int bb_index,
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
df_ref def, use;
struct dead_debug_local debug;
dead_debug_local_init (&debug, NULL, NULL);
@ -3204,7 +3163,7 @@ df_note_bb_compute (unsigned int bb_index,
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
struct df_mw_hardreg **mws_rec;
int debug_insn;
@ -3221,13 +3180,14 @@ df_note_bb_compute (unsigned int bb_index,
{
if (REG_DEAD_DEBUGGING && dump_file)
{
fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn));
fprintf (dump_file, "processing call %d\n live =",
INSN_UID (insn));
df_print_regset (dump_file, live);
}
/* We only care about real sets for calls. Clobbers cannot
be depended on to really die. */
mws_rec = DF_INSN_UID_MWS (uid);
mws_rec = DF_INSN_INFO_MWS (insn_info);
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
@ -3241,9 +3201,8 @@ df_note_bb_compute (unsigned int bb_index,
/* All of the defs except the return value are some sort of
clobber. This code is for the return. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
{
@ -3259,7 +3218,7 @@ df_note_bb_compute (unsigned int bb_index,
else
{
/* Regular insn. */
mws_rec = DF_INSN_UID_MWS (uid);
mws_rec = DF_INSN_INFO_MWS (insn_info);
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
@ -3270,9 +3229,8 @@ df_note_bb_compute (unsigned int bb_index,
mws_rec++;
}
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
df_create_unused_note (insn,
def, live, artificial_uses, &debug);
@ -3286,7 +3244,7 @@ df_note_bb_compute (unsigned int bb_index,
}
/* Process the uses. */
mws_rec = DF_INSN_UID_MWS (uid);
mws_rec = DF_INSN_INFO_MWS (insn_info);
while (*mws_rec)
{
struct df_mw_hardreg *mws = *mws_rec;
@ -3306,9 +3264,8 @@ df_note_bb_compute (unsigned int bb_index,
mws_rec++;
}
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
if (REG_DEAD_DEBUGGING && dump_file && !debug_insn)
@ -3475,14 +3432,10 @@ df_note_add_problem (void)
void
df_simulate_find_defs (rtx insn, bitmap defs)
{
df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
df_ref def;
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
bitmap_set_bit (defs, DF_REF_REGNO (def));
}
FOR_EACH_INSN_DEF (def, insn)
bitmap_set_bit (defs, DF_REF_REGNO (def));
}
/* Find the set of uses for INSN. This includes partial defs. */
@ -3490,20 +3443,14 @@ df_simulate_find_defs (rtx insn, bitmap defs)
static void
df_simulate_find_uses (rtx insn, bitmap uses)
{
df_ref *rec;
unsigned int uid = INSN_UID (insn);
df_ref def, use;
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
{
df_ref def = *rec;
if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
bitmap_set_bit (uses, DF_REF_REGNO (def));
}
for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
{
df_ref use = *rec;
bitmap_set_bit (uses, DF_REF_REGNO (use));
}
FOR_EACH_INSN_INFO_DEF (def, insn_info)
if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
bitmap_set_bit (uses, DF_REF_REGNO (def));
FOR_EACH_INSN_INFO_USE (use, insn_info)
bitmap_set_bit (uses, DF_REF_REGNO (use));
}
/* Find the set of real DEFs, which are not clobbers, for INSN. */
@ -3511,15 +3458,11 @@ df_simulate_find_uses (rtx insn, bitmap uses)
void
df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
{
df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
df_ref def;
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
bitmap_set_bit (defs, DF_REF_REGNO (def));
}
FOR_EACH_INSN_DEF (def, insn)
if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
bitmap_set_bit (defs, DF_REF_REGNO (def));
}
@ -3528,12 +3471,10 @@ df_simulate_find_noclobber_defs (rtx insn, bitmap defs)
void
df_simulate_defs (rtx insn, bitmap live)
{
df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
df_ref def;
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
FOR_EACH_INSN_DEF (def, insn)
{
df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
/* If the def is to only part of the reg, it does
@ -3549,18 +3490,14 @@ df_simulate_defs (rtx insn, bitmap live)
void
df_simulate_uses (rtx insn, bitmap live)
{
df_ref *use_rec;
unsigned int uid = INSN_UID (insn);
df_ref use;
if (DEBUG_INSN_P (insn))
return;
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
/* Add use to set of uses in this BB. */
bitmap_set_bit (live, DF_REF_REGNO (use));
}
FOR_EACH_INSN_USE (use, insn)
/* Add use to set of uses in this BB. */
bitmap_set_bit (live, DF_REF_REGNO (use));
}
@ -4206,14 +4143,12 @@ df_md_simulate_artificial_defs_at_top (basic_block bb, bitmap local_md)
void
df_md_simulate_one_insn (basic_block bb ATTRIBUTE_UNUSED, rtx insn,
bitmap local_md)
bitmap local_md)
{
unsigned uid = INSN_UID (insn);
df_ref *def_rec;
df_ref def;
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
FOR_EACH_INSN_DEF (def, insn)
{
df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if ((!(df->changeable_flags & DF_NO_HARD_REGS))
|| (dregno >= FIRST_PSEUDO_REGISTER))
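
The multiword-hardreg walks in df_note_bb_compute above keep their hand-written while loops: the patch adds the DF_INSN_INFO_MWS accessor but no FOR_EACH counterpart for struct df_mw_hardreg, so only the lookup changes. The surviving idiom looks roughly like this (a sketch; the per-insn counter is hypothetical):

    struct df_mw_hardreg **mws_rec = DF_INSN_INFO_MWS (insn_info);
    while (*mws_rec)
      {
        struct df_mw_hardreg *mws = *mws_rec;
        if (DF_MWS_REG_DEF_P (mws))
          n_mw_defs++;
        mws_rec++;
      }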

gcc/df-scan.c

@ -1640,6 +1640,7 @@ df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
rtx insn;
df_ref *ref_rec;
df_ref def, use;
if (include_defs)
for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
@ -1658,24 +1659,24 @@ df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
{
if (INSN_P (insn))
{
unsigned int uid = INSN_UID (insn);
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
if (include_defs)
for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
unsigned int regno = DF_REF_REGNO (*ref_rec);
unsigned int regno = DF_REF_REGNO (def);
ref_info->count[regno]++;
}
if (include_uses)
for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
unsigned int regno = DF_REF_REGNO (*ref_rec);
unsigned int regno = DF_REF_REGNO (use);
ref_info->count[regno]++;
}
if (include_eq_uses)
for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
{
unsigned int regno = DF_REF_REGNO (*ref_rec);
unsigned int regno = DF_REF_REGNO (use);
ref_info->count[regno]++;
}
}
@ -1694,6 +1695,7 @@ df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
rtx insn;
df_ref *ref_rec;
df_ref def, use;
if (include_defs)
for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
@ -1726,45 +1728,42 @@ df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
{
if (INSN_P (insn))
{
unsigned int uid = INSN_UID (insn);
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
if (include_defs)
for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
unsigned int regno = DF_REF_REGNO (def);
if (regno >= start)
{
unsigned int id
= ref_info->begin[regno] + ref_info->count[regno]++;
DF_REF_ID (ref) = id;
ref_info->refs[id] = ref;
DF_REF_ID (def) = id;
ref_info->refs[id] = def;
}
}
if (include_uses)
for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
unsigned int regno = DF_REF_REGNO (use);
if (regno >= start)
{
unsigned int id
= ref_info->begin[regno] + ref_info->count[regno]++;
DF_REF_ID (ref) = id;
ref_info->refs[id] = ref;
DF_REF_ID (use) = id;
ref_info->refs[id] = use;
}
}
if (include_eq_uses)
for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
{
df_ref ref = *ref_rec;
unsigned int regno = DF_REF_REGNO (ref);
unsigned int regno = DF_REF_REGNO (use);
if (regno >= start)
{
unsigned int id
= ref_info->begin[regno] + ref_info->count[regno]++;
DF_REF_ID (ref) = id;
ref_info->refs[id] = ref;
DF_REF_ID (use) = id;
ref_info->refs[id] = use;
}
}
}

gcc/df.h

@ -736,6 +736,7 @@ struct df_d
#define DF_INSN_INFO_DEFS(II) ((II)->defs)
#define DF_INSN_INFO_USES(II) ((II)->uses)
#define DF_INSN_INFO_EQ_USES(II) ((II)->eq_uses)
#define DF_INSN_INFO_MWS(II) ((II)->mw_hardregs)
#define DF_INSN_LUID(INSN) (DF_INSN_INFO_LUID (DF_INSN_INFO_GET (INSN)))
#define DF_INSN_DEFS(INSN) (DF_INSN_INFO_DEFS (DF_INSN_INFO_GET (INSN)))
@ -753,6 +754,27 @@ struct df_d
#define DF_INSN_UID_EQ_USES(INSN) (DF_INSN_UID_GET (INSN)->eq_uses)
#define DF_INSN_UID_MWS(INSN) (DF_INSN_UID_GET (INSN)->mw_hardregs)
#define FOR_EACH_INSN_INFO_DEF(ITER, INSN) \
for (df_ref *ITER##_ = DF_INSN_INFO_DEFS (INSN); (ITER = *ITER##_); \
++ITER##_)
#define FOR_EACH_INSN_INFO_USE(ITER, INSN) \
for (df_ref *ITER##_ = DF_INSN_INFO_USES (INSN); (ITER = *ITER##_); \
++ITER##_)
#define FOR_EACH_INSN_INFO_EQ_USE(ITER, INSN) \
for (df_ref *ITER##_ = DF_INSN_INFO_EQ_USES (INSN); (ITER = *ITER##_); \
++ITER##_)
#define FOR_EACH_INSN_DEF(ITER, INSN) \
FOR_EACH_INSN_INFO_DEF(ITER, DF_INSN_INFO_GET (INSN))
#define FOR_EACH_INSN_USE(ITER, INSN) \
FOR_EACH_INSN_INFO_USE(ITER, DF_INSN_INFO_GET (INSN))
#define FOR_EACH_INSN_EQ_USE(ITER, INSN) \
FOR_EACH_INSN_INFO_EQ_USE(ITER, DF_INSN_INFO_GET (INSN))
/* An obstack for bitmap not related to specific dataflow problems.
This obstack should e.g. be used for bitmaps with a short life time
such as temporary bitmaps. This obstack is declared in df-core.c. */
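
Two properties of the new macros matter at several converted sites. First, the hidden cursor is a fresh df_ref * named after the iterator (ITER##_), which is why the loops can be nested and mixed freely. Second, the loop condition assigns the current element to ITER, so ITER is a null df_ref once the vector's terminating entry is reached; code that breaks out early can therefore test the iterator afterwards, as the dce.c and ree.c hunks do. A rough equivalent of FOR_EACH_INSN_DEF (def, insn), sketched here with the df_find_def test as the body:

    df_ref def;
    for (df_ref *def_ = DF_INSN_DEFS (insn); (def = *def_); ++def_)
      if (DF_REF_REGNO (def) == REGNO (reg))
        break;            /* early exit: def is the matching ref */
    /* Normal exit: def == NULL, which is what tests such as
       "if (use == NULL)" in dce.c rely on.  */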

gcc/fwprop.c

@ -700,16 +700,13 @@ local_ref_killed_between_p (df_ref ref, rtx from, rtx to)
for (insn = from; insn != to; insn = NEXT_INSN (insn))
{
df_ref *def_rec;
df_ref def;
if (!INSN_P (insn))
continue;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
{
df_ref def = *def_rec;
if (DF_REF_REGNO (ref) == DF_REF_REGNO (def))
return true;
}
FOR_EACH_INSN_DEF (def, insn)
if (DF_REF_REGNO (ref) == DF_REF_REGNO (def))
return true;
}
return false;
}
@ -790,7 +787,7 @@ use_killed_between (df_ref use, rtx def_insn, rtx target_insn)
static bool
all_uses_available_at (rtx def_insn, rtx target_insn)
{
df_ref *use_rec;
df_ref use;
struct df_insn_info *insn_info = DF_INSN_INFO_GET (def_insn);
rtx def_set = single_set (def_insn);
rtx next;
@ -809,18 +806,12 @@ all_uses_available_at (rtx def_insn, rtx target_insn)
/* If the insn uses the reg that it defines, the substitution is
invalid. */
for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if (rtx_equal_p (DF_REF_REG (use), def_reg))
return false;
}
for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if (rtx_equal_p (DF_REF_REG (use), def_reg))
return false;
}
FOR_EACH_INSN_INFO_USE (use, insn_info)
if (rtx_equal_p (DF_REF_REG (use), def_reg))
return false;
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
if (rtx_equal_p (DF_REF_REG (use), def_reg))
return false;
}
else
{
@ -828,17 +819,15 @@ all_uses_available_at (rtx def_insn, rtx target_insn)
/* Look at all the uses of DEF_INSN, and see if they are not
killed between DEF_INSN and TARGET_INSN. */
for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
df_ref use = *use_rec;
if (def_reg && rtx_equal_p (DF_REF_REG (use), def_reg))
return false;
if (use_killed_between (use, def_insn, target_insn))
return false;
}
for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
{
df_ref use = *use_rec;
if (def_reg && rtx_equal_p (DF_REF_REG (use), def_reg))
return false;
if (use_killed_between (use, def_insn, target_insn))
@ -1034,8 +1023,7 @@ static bool
free_load_extend (rtx src, rtx insn)
{
rtx reg;
df_ref *use_vec;
df_ref use = 0, def;
df_ref def, use;
reg = XEXP (src, 0);
#ifdef LOAD_EXTEND_OP
@ -1043,15 +1031,11 @@ free_load_extend (rtx src, rtx insn)
#endif
return false;
for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
{
use = *use_vec;
if (!DF_REF_IS_ARTIFICIAL (use)
&& DF_REF_TYPE (use) == DF_REF_REG_USE
&& DF_REF_REG (use) == reg)
break;
}
FOR_EACH_INSN_USE (use, insn)
if (!DF_REF_IS_ARTIFICIAL (use)
&& DF_REF_TYPE (use) == DF_REF_REG_USE
&& DF_REF_REG (use) == reg)
break;
if (!use)
return false;

gcc/gcse.c

@ -2961,16 +2961,16 @@ update_bb_reg_pressure (basic_block bb, rtx from)
{
rtx dreg, insn;
basic_block succ_bb;
df_ref *op, op_ref;
df_ref use, op_ref;
edge succ;
edge_iterator ei;
int decreased_pressure = 0;
int nregs;
enum reg_class pressure_class;
for (op = DF_INSN_USES (from); *op; op++)
FOR_EACH_INSN_USE (use, from)
{
dreg = DF_REF_REAL_REG (*op);
dreg = DF_REF_REAL_REG (use);
/* The live range of register is shrunk only if it isn't:
1. referred on any path from the end of this block to EXIT, or
2. referred by insns other than FROM in this block. */
@ -3593,17 +3593,17 @@ calculate_bb_reg_pressure (void)
{
rtx dreg;
int regno;
df_ref *def_rec, *use_rec;
df_ref def, use;
if (! NONDEBUG_INSN_P (insn))
continue;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
FOR_EACH_INSN_DEF (def, insn)
{
dreg = DF_REF_REAL_REG (*def_rec);
dreg = DF_REF_REAL_REG (def);
gcc_assert (REG_P (dreg));
regno = REGNO (dreg);
if (!(DF_REF_FLAGS (*def_rec)
if (!(DF_REF_FLAGS (def)
& (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
{
if (bitmap_clear_bit (curr_regs_live, regno))
@ -3611,9 +3611,9 @@ calculate_bb_reg_pressure (void)
}
}
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
FOR_EACH_INSN_USE (use, insn)
{
dreg = DF_REF_REAL_REG (*use_rec);
dreg = DF_REF_REAL_REG (use);
gcc_assert (REG_P (dreg));
regno = REGNO (dreg);
if (bitmap_set_bit (curr_regs_live, regno))

gcc/hw-doloop.c

@ -119,7 +119,7 @@ scan_loop (hwloop_info loop)
insn != NEXT_INSN (BB_END (bb));
insn = NEXT_INSN (insn))
{
df_ref *def_rec;
df_ref def;
HARD_REG_SET set_this_insn;
if (!NONDEBUG_INSN_P (insn))
@ -131,9 +131,9 @@ scan_loop (hwloop_info loop)
loop->has_asm = true;
CLEAR_HARD_REG_SET (set_this_insn);
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
FOR_EACH_INSN_DEF (def, insn)
{
rtx dreg = DF_REF_REG (*def_rec);
rtx dreg = DF_REF_REG (def);
if (!REG_P (dreg))
continue;

gcc/ifcvt.c

@ -4279,22 +4279,16 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
FOR_BB_INSNS_REVERSE (new_dest, insn)
if (NONDEBUG_INSN_P (insn))
{
df_ref *def_rec;
unsigned int uid = INSN_UID (insn);
df_ref def;
/* If this insn sets any reg in return_regs.. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
unsigned r = DF_REF_REGNO (def);
if (bitmap_bit_p (return_regs, r))
/* If this insn sets any reg in return_regs, add all
reg uses to the set of regs we're interested in. */
FOR_EACH_INSN_DEF (def, insn)
if (bitmap_bit_p (return_regs, DF_REF_REGNO (def)))
{
df_simulate_uses (insn, return_regs);
break;
}
/* ..then add all reg uses to the set of regs
we're interested in. */
if (*def_rec)
df_simulate_uses (insn, return_regs);
}
}
if (bitmap_intersect_p (merge_set, return_regs))
{

gcc/init-regs.c

@ -68,14 +68,12 @@ initialize_uninitialized_regs (void)
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
df_ref *use_rec;
df_ref use;
if (!NONDEBUG_INSN_P (insn))
continue;
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
FOR_EACH_INSN_USE (use, insn)
{
df_ref use = *use_rec;
unsigned int regno = DF_REF_REGNO (use);
/* Only do this for the pseudos. */
@ -109,7 +107,8 @@ initialize_uninitialized_regs (void)
if (dump_file)
fprintf (dump_file,
"adding initialization in %s of reg %d at in block %d for insn %d.\n",
current_function_name (), regno, bb->index, uid);
current_function_name (), regno, bb->index,
INSN_UID (insn));
}
}
}

gcc/ira-lives.c

@ -725,13 +725,13 @@ make_early_clobber_and_input_conflicts (void)
static bool
mark_hard_reg_early_clobbers (rtx insn, bool live_p)
{
df_ref *def_rec;
df_ref def;
bool set_p = false;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
if (DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MUST_CLOBBER))
FOR_EACH_INSN_DEF (def, insn)
if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
{
rtx dreg = DF_REF_REG (*def_rec);
rtx dreg = DF_REF_REG (def);
if (GET_CODE (dreg) == SUBREG)
dreg = SUBREG_REG (dreg);
@ -742,9 +742,9 @@ mark_hard_reg_early_clobbers (rtx insn, bool live_p)
because there is no way to say that non-operand hard
register clobbers are not early ones. */
if (live_p)
mark_ref_live (*def_rec);
mark_ref_live (def);
else
mark_ref_dead (*def_rec);
mark_ref_dead (def);
set_p = true;
}
@ -1114,7 +1114,7 @@ process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
pessimistic, but it probably doesn't matter much in practice. */
FOR_BB_INSNS_REVERSE (bb, insn)
{
df_ref *def_rec, *use_rec;
df_ref def, use;
bool call_p;
if (!NONDEBUG_INSN_P (insn))
@ -1135,9 +1135,9 @@ process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
live would stop us from allocating it to a call-crossing
allocno. */
call_p = CALL_P (insn);
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
mark_ref_live (*def_rec);
FOR_EACH_INSN_DEF (def, insn)
if (!call_p || !DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
mark_ref_live (def);
/* If INSN has multiple outputs, then any value used in one
of the outputs conflicts with the other outputs. Model this
@ -1151,12 +1151,12 @@ process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
to the same hard register as an unused output we could
set the hard register before the output reload insn. */
if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
FOR_EACH_INSN_USE (use, insn)
{
int i;
rtx reg;
reg = DF_REF_REG (*use_rec);
reg = DF_REF_REG (use);
for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
{
rtx set;
@ -1167,7 +1167,7 @@ process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
{
/* After the previous loop, this is a no-op if
REG is contained within SET_DEST (SET). */
mark_ref_live (*use_rec);
mark_ref_live (use);
break;
}
}
@ -1178,9 +1178,9 @@ process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
process_single_reg_class_operands (false, freq);
/* See which defined values die here. */
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
if (!call_p || !DF_REF_FLAGS_IS_SET (*def_rec, DF_REF_MAY_CLOBBER))
mark_ref_dead (*def_rec);
FOR_EACH_INSN_DEF (def, insn)
if (!call_p || !DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
mark_ref_dead (def);
if (call_p)
{
@ -1249,8 +1249,8 @@ process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
curr_point++;
/* Mark each used value as live. */
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
mark_ref_live (*use_rec);
FOR_EACH_INSN_USE (use, insn)
mark_ref_live (use);
process_single_reg_class_operands (true, freq);
@ -1263,16 +1263,16 @@ process_bb_node_lives (ira_loop_tree_node_t loop_tree_node)
/* Mark each hard reg as live again. For example, a
hard register can be in clobber and in an insn
input. */
for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
FOR_EACH_INSN_USE (use, insn)
{
rtx ureg = DF_REF_REG (*use_rec);
rtx ureg = DF_REF_REG (use);
if (GET_CODE (ureg) == SUBREG)
ureg = SUBREG_REG (ureg);
if (! REG_P (ureg) || REGNO (ureg) >= FIRST_PSEUDO_REGISTER)
continue;
mark_ref_live (*use_rec);
mark_ref_live (use);
}
}

gcc/ira.c

@ -2251,12 +2251,11 @@ compute_regs_asm_clobbered (void)
rtx insn;
FOR_BB_INSNS_REVERSE (bb, insn)
{
df_ref *def_rec;
df_ref def;
if (insn_contains_asm (insn))
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
FOR_EACH_INSN_DEF (def, insn)
{
df_ref def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
if (HARD_REGISTER_NUM_P (dregno))
add_to_hard_reg_set (&crtl->asm_clobbers,
@ -4041,9 +4040,8 @@ build_insn_chain (void)
{
if (!NOTE_P (insn) && !BARRIER_P (insn))
{
unsigned int uid = INSN_UID (insn);
df_ref *def_rec;
df_ref *use_rec;
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref def, use;
c = new_insn_chain ();
c->next = next;
@ -4055,9 +4053,8 @@ build_insn_chain (void)
c->block = bb->index;
if (NONDEBUG_INSN_P (insn))
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
df_ref def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
/* Ignore may clobbers because these are generated
@ -4146,9 +4143,8 @@ build_insn_chain (void)
bitmap_copy (&c->live_throughout, live_relevant_regs);
if (NONDEBUG_INSN_P (insn))
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
df_ref use = *use_rec;
unsigned int regno = DF_REF_REGNO (use);
rtx reg = DF_REF_REG (use);
@ -4440,12 +4436,14 @@ find_moveable_pseudos (void)
FOR_BB_INSNS (bb, insn)
if (NONDEBUG_INSN_P (insn))
{
df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref *u_rec, *d_rec;
df_ref def, use;
uid_luid[INSN_UID (insn)] = i++;
u_rec = DF_INSN_USES (insn);
d_rec = DF_INSN_DEFS (insn);
u_rec = DF_INSN_INFO_USES (insn_info);
d_rec = DF_INSN_INFO_DEFS (insn_info);
if (d_rec[0] != NULL && d_rec[1] == NULL
&& u_rec[0] != NULL && u_rec[1] == NULL
&& DF_REF_REGNO (*u_rec) == DF_REF_REGNO (*d_rec)
@ -4459,22 +4457,20 @@ find_moveable_pseudos (void)
bitmap_clear_bit (transp, regno);
continue;
}
while (*u_rec)
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
unsigned regno = DF_REF_REGNO (*u_rec);
unsigned regno = DF_REF_REGNO (use);
bitmap_set_bit (&used, regno);
if (bitmap_clear_bit (moveable, regno))
bitmap_clear_bit (transp, regno);
u_rec++;
}
while (*d_rec)
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
unsigned regno = DF_REF_REGNO (*d_rec);
unsigned regno = DF_REF_REGNO (def);
bitmap_set_bit (&set, regno);
bitmap_clear_bit (transp, regno);
bitmap_clear_bit (moveable, regno);
d_rec++;
}
}
}
@ -4612,7 +4608,7 @@ find_moveable_pseudos (void)
bitmap def_bb_transp = bb_transp_live + def_block->index;
bool local_to_bb_p = bitmap_bit_p (def_bb_local, i);
rtx use_insn = closest_uses[i];
df_ref *def_insn_use_rec = DF_INSN_USES (def_insn);
df_ref use;
bool all_ok = true;
bool all_transp = true;
@ -4643,9 +4639,8 @@ find_moveable_pseudos (void)
if (dump_file)
fprintf (dump_file, "Examining insn %d, def for %d\n",
INSN_UID (def_insn), i);
while (*def_insn_use_rec != NULL)
FOR_EACH_INSN_USE (use, def_insn)
{
df_ref use = *def_insn_use_rec;
unsigned regno = DF_REF_REGNO (use);
if (bitmap_bit_p (&unusable_as_input, regno))
{
@ -4688,8 +4683,6 @@ find_moveable_pseudos (void)
else
all_transp = false;
}
def_insn_use_rec++;
}
if (!all_ok)
continue;
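
One spot in find_moveable_pseudos above is left on the raw vectors: the single-def/single-use test needs to inspect elements [0] and [1] of the null-terminated arrays at once, which the purely sequential FOR_EACH_* macros cannot express. In sketch form (the boolean name is hypothetical):

    df_ref *d_rec = DF_INSN_INFO_DEFS (insn_info);
    df_ref *u_rec = DF_INSN_INFO_USES (insn_info);
    bool single_def_use_p = (d_rec[0] != NULL && d_rec[1] == NULL
                             && u_rec[0] != NULL && u_rec[1] == NULL
                             && DF_REF_REGNO (*u_rec) == DF_REF_REGNO (*d_rec));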

gcc/loop-invariant.c

@ -826,14 +826,14 @@ static bool
check_dependencies (rtx insn, bitmap depends_on)
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref *use_rec;
df_ref use;
basic_block bb = BLOCK_FOR_INSN (insn);
for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
if (!check_dependency (bb, *use_rec, depends_on))
FOR_EACH_INSN_INFO_USE (use, insn_info)
if (!check_dependency (bb, use, depends_on))
return false;
for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
if (!check_dependency (bb, *use_rec, depends_on))
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
if (!check_dependency (bb, use, depends_on))
return false;
return true;
@ -909,19 +909,17 @@ static void
record_uses (rtx insn)
{
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref *use_rec;
df_ref use;
struct invariant *inv;
for (use_rec = DF_INSN_INFO_USES (insn_info); *use_rec; use_rec++)
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
df_ref use = *use_rec;
inv = invariant_for_use (use);
if (inv)
record_use (inv->def, use);
}
for (use_rec = DF_INSN_INFO_EQ_USES (insn_info); *use_rec; use_rec++)
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
{
df_ref use = *use_rec;
inv = invariant_for_use (use);
if (inv)
record_use (inv->def, use);

gcc/recog.c

@ -3024,7 +3024,7 @@ peep2_find_free_register (int from, int to, const char *class_str,
{
enum reg_class cl;
HARD_REG_SET live;
df_ref *def_rec;
df_ref def;
int i;
gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
@ -3041,9 +3041,8 @@ peep2_find_free_register (int from, int to, const char *class_str,
gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
/* Don't use registers set or clobbered by the insn. */
for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
*def_rec; def_rec++)
SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
from = peep2_buf_position (from + 1);
}

gcc/ree.c

@ -431,23 +431,20 @@ transform_ifelse (ext_cand *cand, rtx def_insn)
static struct df_link *
get_defs (rtx insn, rtx reg, vec<rtx> *dest)
{
df_ref reg_info, *uses;
df_ref use;
struct df_link *ref_chain, *ref_link;
reg_info = NULL;
for (uses = DF_INSN_USES (insn); *uses; uses++)
FOR_EACH_INSN_USE (use, insn)
{
reg_info = *uses;
if (GET_CODE (DF_REF_REG (reg_info)) == SUBREG)
if (GET_CODE (DF_REF_REG (use)) == SUBREG)
return NULL;
if (REGNO (DF_REF_REG (reg_info)) == REGNO (reg))
break;
if (REGNO (DF_REF_REG (use)) == REGNO (reg))
break;
}
gcc_assert (reg_info != NULL && uses != NULL);
gcc_assert (use != NULL);
ref_chain = DF_REF_CHAIN (reg_info);
ref_chain = DF_REF_CHAIN (use);
for (ref_link = ref_chain; ref_link; ref_link = ref_link->next)
{

View File

@@ -124,6 +124,7 @@ regstat_bb_compute_ri (unsigned int bb_index,
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
df_ref def, use;
int luid = 0;
bitmap_iterator bi;
unsigned int regno;
@@ -158,7 +159,7 @@ regstat_bb_compute_ri (unsigned int bb_index,
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
bitmap_iterator bi;
struct df_mw_hardreg **mws_rec;
rtx link;
@@ -209,7 +210,7 @@ regstat_bb_compute_ri (unsigned int bb_index,
/* We only care about real sets for calls. Clobbers cannot
be depended on.
Only do this if the value is totally dead. */
for (mws_rec = DF_INSN_UID_MWS (uid); *mws_rec; mws_rec++)
for (mws_rec = DF_INSN_INFO_MWS (insn_info); *mws_rec; mws_rec++)
{
struct df_mw_hardreg *mws = *mws_rec;
if (DF_MWS_REG_DEF_P (mws))
@@ -235,9 +236,8 @@ regstat_bb_compute_ri (unsigned int bb_index,
/* All of the defs except the return value are some sort of
clobber. This code is for the return. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
df_ref def = *def_rec;
if ((!CALL_P (insn))
|| (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))))
{
@@ -301,9 +301,8 @@ regstat_bb_compute_ri (unsigned int bb_index,
}
}
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
df_ref use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
if (uregno >= FIRST_PSEUDO_REGISTER)
@@ -444,6 +443,7 @@ regstat_bb_compute_calls_crossed (unsigned int bb_index, bitmap live)
rtx insn;
df_ref *def_rec;
df_ref *use_rec;
df_ref def, use;
bitmap_copy (live, df_get_live_out (bb));
@@ -465,7 +465,7 @@ regstat_bb_compute_calls_crossed (unsigned int bb_index, bitmap live)
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
unsigned int regno;
if (!INSN_P (insn))
@@ -486,9 +486,8 @@ regstat_bb_compute_calls_crossed (unsigned int bb_index, bitmap live)
/* All of the defs except the return value are some sort of
clobber. This code is for the return. */
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
df_ref def = *def_rec;
if ((!CALL_P (insn))
|| (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))))
{
@@ -498,11 +497,8 @@ regstat_bb_compute_calls_crossed (unsigned int bb_index, bitmap live)
}
}
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
bitmap_set_bit (live, DF_REF_REGNO (use));
}
FOR_EACH_INSN_INFO_USE (use, insn_info)
bitmap_set_bit (live, DF_REF_REGNO (use));
}
}
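
The multiword-hardreg walk in regstat_bb_compute_ri above keeps its raw pointer loop but switches from the UID-based DF_INSN_UID_MWS lookup to the new insn_info-based accessor. By analogy with the existing DF_INSN_INFO_DEFS/USES accessors, the new macro presumably reads the records straight out of the df_insn_info; a sketch of one plausible definition (the real one is the macro added to df.h):

/* Plausible shape of the new accessor: fetch the multiword hard-register
   records from a struct df_insn_info instead of looking them up by the
   insn's UID as DF_INSN_UID_MWS does.  */
#define DF_INSN_INFO_MWS(II) ((II)->mw_hardregs)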

View File

@@ -4725,7 +4725,7 @@ find_inc (struct mem_inc_info *mii, bool backwards)
if (parse_add_or_inc (mii, inc_cand, backwards))
{
struct dep_replacement *desc;
df_ref *def_rec;
df_ref def;
rtx newaddr, newmem;
if (sched_verbose >= 5)
@@ -4734,18 +4734,15 @@ find_inc (struct mem_inc_info *mii, bool backwards)
/* Need to assure that none of the operands of the inc
instruction are assigned to by the mem insn. */
for (def_rec = DF_INSN_DEFS (mii->mem_insn); *def_rec; def_rec++)
{
df_ref def = *def_rec;
if (reg_overlap_mentioned_p (DF_REF_REG (def), mii->inc_input)
|| reg_overlap_mentioned_p (DF_REF_REG (def), mii->mem_reg0))
{
if (sched_verbose >= 5)
fprintf (sched_dump,
"inc conflicts with store failure.\n");
goto next;
}
}
FOR_EACH_INSN_DEF (def, mii->mem_insn)
if (reg_overlap_mentioned_p (DF_REF_REG (def), mii->inc_input)
|| reg_overlap_mentioned_p (DF_REF_REG (def), mii->mem_reg0))
{
if (sched_verbose >= 5)
fprintf (sched_dump,
"inc conflicts with store failure.\n");
goto next;
}
newaddr = mii->inc_input;
if (mii->mem_index != NULL_RTX)
newaddr = gen_rtx_PLUS (GET_MODE (newaddr), newaddr,
@@ -4820,22 +4817,19 @@ find_mem (struct mem_inc_info *mii, rtx *address_of_x)
}
if (REG_P (reg0))
{
df_ref *def_rec;
df_ref use;
int occurrences = 0;
/* Make sure this reg appears only once in this insn. Can't use
count_occurrences since that only works for pseudos. */
for (def_rec = DF_INSN_USES (mii->mem_insn); *def_rec; def_rec++)
{
df_ref def = *def_rec;
if (reg_overlap_mentioned_p (reg0, DF_REF_REG (def)))
if (++occurrences > 1)
{
if (sched_verbose >= 5)
fprintf (sched_dump, "mem count failure\n");
return false;
}
}
FOR_EACH_INSN_USE (use, mii->mem_insn)
if (reg_overlap_mentioned_p (reg0, DF_REF_REG (use)))
if (++occurrences > 1)
{
if (sched_verbose >= 5)
fprintf (sched_dump, "mem count failure\n");
return false;
}
mii->mem_reg0 = reg0;
return find_inc (mii, true) || find_inc (mii, false);

View File

@@ -2613,8 +2613,7 @@ static void
maybe_downgrade_id_to_use (idata_t id, insn_t insn)
{
bool must_be_use = false;
unsigned uid = INSN_UID (insn);
df_ref *rec;
df_ref def;
rtx lhs = IDATA_LHS (id);
rtx rhs = IDATA_RHS (id);
@@ -2628,10 +2627,8 @@ maybe_downgrade_id_to_use (idata_t id, insn_t insn)
return;
}
for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
FOR_EACH_INSN_DEF (def, insn)
{
df_ref def = *rec;
if (DF_REF_INSN (def)
&& DF_REF_FLAGS_IS_SET (def, DF_REF_PRE_POST_MODIFY)
&& loc_mentioned_in_p (DF_REF_LOC (def), IDATA_RHS (id)))
@@ -2659,13 +2656,12 @@ maybe_downgrade_id_to_use (idata_t id, insn_t insn)
static void
setup_id_reg_sets (idata_t id, insn_t insn)
{
unsigned uid = INSN_UID (insn);
df_ref *rec;
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref def, use;
regset tmp = get_clear_regset_from_pool ();
for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
FOR_EACH_INSN_INFO_DEF (def, insn_info)
{
df_ref def = *rec;
unsigned int regno = DF_REF_REGNO (def);
/* Post modifies are treated like clobbers by sched-deps.c. */
@@ -2689,9 +2685,8 @@ setup_id_reg_sets (idata_t id, insn_t insn)
bitmap_set_bit (tmp, regno);
}
for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
FOR_EACH_INSN_INFO_USE (use, insn_info)
{
df_ref use = *rec;
unsigned int regno = DF_REF_REGNO (use);
/* When these refs are met for the first time, skip them, as

View File

@@ -64,7 +64,7 @@ bool
requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
HARD_REG_SET set_up_by_prologue)
{
df_ref *df_rec;
df_ref def, use;
HARD_REG_SET hardregs;
unsigned regno;
@@ -76,9 +76,9 @@ requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
return true;
CLEAR_HARD_REG_SET (hardregs);
for (df_rec = DF_INSN_DEFS (insn); *df_rec; df_rec++)
FOR_EACH_INSN_DEF (def, insn)
{
rtx dreg = DF_REF_REG (*df_rec);
rtx dreg = DF_REF_REG (def);
if (!REG_P (dreg))
continue;
@@ -94,9 +94,9 @@ requires_stack_frame_p (rtx insn, HARD_REG_SET prologue_used,
&& df_regs_ever_live_p (regno))
return true;
for (df_rec = DF_INSN_USES (insn); *df_rec; df_rec++)
FOR_EACH_INSN_USE (use, insn)
{
rtx reg = DF_REF_REG (*df_rec);
rtx reg = DF_REF_REG (use);
if (!REG_P (reg))
continue;
@@ -333,7 +333,7 @@ prepare_shrink_wrap (basic_block entry_block)
{
rtx insn, curr, x;
HARD_REG_SET uses, defs;
df_ref *ref;
df_ref def, use;
bool split_p = false;
if (JUMP_P (BB_END (entry_block)))
@@ -353,17 +353,17 @@ prepare_shrink_wrap (basic_block entry_block)
&split_p))
{
/* Add all defined registers to DEFs. */
for (ref = DF_INSN_DEFS (insn); *ref; ref++)
FOR_EACH_INSN_DEF (def, insn)
{
x = DF_REF_REG (*ref);
x = DF_REF_REG (def);
if (REG_P (x) && HARD_REGISTER_P (x))
SET_HARD_REG_BIT (defs, REGNO (x));
}
/* Add all used registers to USESs. */
for (ref = DF_INSN_USES (insn); *ref; ref++)
FOR_EACH_INSN_USE (use, insn)
{
x = DF_REF_REG (*ref);
x = DF_REF_REG (use);
if (REG_P (x) && HARD_REGISTER_P (x))
SET_HARD_REG_BIT (uses, REGNO (x));
}

View File

@@ -645,7 +645,7 @@ compute_store_table (void)
unsigned regno;
#endif
rtx insn, tmp;
df_ref *def_rec;
df_ref def;
int *last_set_in, *already_set;
struct st_expr * ptr, **prev_next_ptr_ptr;
unsigned int max_gcse_regno = max_reg_num ();
@@ -665,8 +665,8 @@ compute_store_table (void)
if (! NONDEBUG_INSN_P (insn))
continue;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
last_set_in[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);
FOR_EACH_INSN_DEF (def, insn)
last_set_in[DF_REF_REGNO (def)] = INSN_UID (insn);
}
/* Now find the stores. */
@@ -676,16 +676,16 @@ compute_store_table (void)
if (! NONDEBUG_INSN_P (insn))
continue;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
already_set[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);
FOR_EACH_INSN_DEF (def, insn)
already_set[DF_REF_REGNO (def)] = INSN_UID (insn);
/* Now that we've marked regs, look for stores. */
find_moveable_store (insn, already_set, last_set_in);
/* Unmark regs that are no longer set. */
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
if (last_set_in[DF_REF_REGNO (*def_rec)] == INSN_UID (insn))
last_set_in[DF_REF_REGNO (*def_rec)] = 0;
FOR_EACH_INSN_DEF (def, insn)
if (last_set_in[DF_REF_REGNO (def)] == INSN_UID (insn))
last_set_in[DF_REF_REGNO (def)] = 0;
}
#ifdef ENABLE_CHECKING
@@ -1068,12 +1068,12 @@ build_store_vectors (void)
FOR_BB_INSNS (bb, insn)
if (NONDEBUG_INSN_P (insn))
{
df_ref *def_rec;
for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
df_ref def;
FOR_EACH_INSN_DEF (def, insn)
{
unsigned int ref_regno = DF_REF_REGNO (*def_rec);
unsigned int ref_regno = DF_REF_REGNO (def);
if (ref_regno < max_gcse_regno)
regs_set_in_block[DF_REF_REGNO (*def_rec)] = 1;
regs_set_in_block[DF_REF_REGNO (def)] = 1;
}
}

gcc/web.c
View File

@@ -173,51 +173,34 @@ union_defs (df_ref use, struct web_entry *def_entry,
{
struct df_insn_info *insn_info = DF_REF_INSN_INFO (use);
struct df_link *link = DF_REF_CHAIN (use);
df_ref *eq_use_link;
df_ref *def_link;
rtx set;
if (insn_info)
{
rtx insn = insn_info->insn;
eq_use_link = DF_INSN_INFO_EQ_USES (insn_info);
def_link = DF_INSN_INFO_DEFS (insn_info);
set = single_set (insn);
df_ref eq_use;
set = single_set (insn_info->insn);
FOR_EACH_INSN_INFO_EQ_USE (eq_use, insn_info)
if (use != eq_use
&& DF_REF_REAL_REG (use) == DF_REF_REAL_REG (eq_use))
(*fun) (use_entry + DF_REF_ID (use), use_entry + DF_REF_ID (eq_use));
}
else
{
/* An artificial use. It links up with nothing. */
eq_use_link = NULL;
def_link = NULL;
set = NULL;
}
set = NULL;
/* Union all occurrences of the same register in reg notes. */
if (eq_use_link)
while (*eq_use_link)
{
if (use != *eq_use_link
&& DF_REF_REAL_REG (use) == DF_REF_REAL_REG (*eq_use_link))
(*fun) (use_entry + DF_REF_ID (use),
use_entry + DF_REF_ID (*eq_use_link));
eq_use_link++;
}
/* Recognize trivial noop moves and attempt to keep them as noop. */
if (set
&& SET_SRC (set) == DF_REF_REG (use)
&& SET_SRC (set) == SET_DEST (set))
{
if (def_link)
while (*def_link)
{
if (DF_REF_REAL_REG (use) == DF_REF_REAL_REG (*def_link))
(*fun) (use_entry + DF_REF_ID (use),
def_entry + DF_REF_ID (*def_link));
def_link++;
}
df_ref def;
FOR_EACH_INSN_INFO_DEF (def, insn_info)
if (DF_REF_REAL_REG (use) == DF_REF_REAL_REG (def))
(*fun) (use_entry + DF_REF_ID (use), def_entry + DF_REF_ID (def));
}
/* UD chains of uninitialized REGs are empty. Keeping all uses of
@@ -248,23 +231,14 @@ union_defs (df_ref use, struct web_entry *def_entry,
/* A READ_WRITE use requires the corresponding def to be in the same
register. Find it and union. */
if (DF_REF_FLAGS (use) & DF_REF_READ_WRITE)
{
df_ref *link;
if (insn_info)
{
df_ref def;
if (insn_info)
link = DF_INSN_INFO_DEFS (insn_info);
else
link = NULL;
if (link)
while (*link)
{
if (DF_REF_REAL_REG (*link) == DF_REF_REAL_REG (use))
(*fun) (use_entry + DF_REF_ID (use),
def_entry + DF_REF_ID (*link));
link++;
}
}
FOR_EACH_INSN_INFO_DEF (def, insn_info)
if (DF_REF_REAL_REG (use) == DF_REF_REAL_REG (def))
(*fun) (use_entry + DF_REF_ID (use), def_entry + DF_REF_ID (def));
}
}
/* Find the corresponding register for the given entry. */
@@ -375,22 +349,16 @@ pass_web::execute (function *fun)
FOR_ALL_BB_FN (bb, fun)
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
if (NONDEBUG_INSN_P (insn))
{
df_ref *use_rec;
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
DF_REF_ID (use) = uses_num++;
}
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
DF_REF_ID (use) = uses_num++;
}
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref use;
FOR_EACH_INSN_INFO_USE (use, insn_info)
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
DF_REF_ID (use) = uses_num++;
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
DF_REF_ID (use) = uses_num++;
}
}
@@ -402,34 +370,23 @@ pass_web::execute (function *fun)
/* Produce the web. */
FOR_ALL_BB_FN (bb, fun)
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
if (NONDEBUG_INSN_P (insn))
{
df_ref *use_rec;
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref use;
union_match_dups (insn, def_entry, use_entry, unionfind_union);
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
union_defs (use, def_entry, used, use_entry, unionfind_union);
}
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
union_defs (use, def_entry, used, use_entry, unionfind_union);
}
FOR_EACH_INSN_INFO_USE (use, insn_info)
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
union_defs (use, def_entry, used, use_entry, unionfind_union);
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
union_defs (use, def_entry, used, use_entry, unionfind_union);
}
}
/* Update the instruction stream, allocating new registers for split pseudos
in progress. */
FOR_ALL_BB_FN (bb, fun)
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
if (NONDEBUG_INSN_P (insn)
/* Ignore naked clobber. For example, reg 134 in the second insn
of the following sequence will not be replaced.
@@ -441,28 +398,21 @@ pass_web::execute (function *fun)
Thus the later passes can optimize them away. */
&& GET_CODE (PATTERN (insn)) != CLOBBER)
{
df_ref *use_rec;
df_ref *def_rec;
for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
replace_ref (use, entry_register (use_entry + DF_REF_ID (use), use, used));
}
for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
{
df_ref use = *use_rec;
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
replace_ref (use, entry_register (use_entry + DF_REF_ID (use), use, used));
}
for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
df_ref def = *def_rec;
if (DF_REF_REGNO (def) >= FIRST_PSEUDO_REGISTER)
replace_ref (def, entry_register (def_entry + DF_REF_ID (def), def, used));
}
struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
df_ref def, use;
FOR_EACH_INSN_INFO_USE (use, insn_info)
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
replace_ref (use, entry_register (use_entry + DF_REF_ID (use),
use, used));
FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
if (DF_REF_REGNO (use) >= FIRST_PSEUDO_REGISTER)
replace_ref (use, entry_register (use_entry + DF_REF_ID (use),
use, used));
FOR_EACH_INSN_INFO_DEF (def, insn_info)
if (DF_REF_REGNO (def) >= FIRST_PSEUDO_REGISTER)
replace_ref (def, entry_register (def_entry + DF_REF_ID (def),
def, used));
}
}
free (def_entry);
free (use_entry);
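
As a usage illustration, here is a hypothetical helper written against the new interface (the function is not part of this commit; its name and purpose are invented for the example):

/* Hypothetical example only: return true if INSN defines hard register
   REGNO, using the new FOR_EACH_INSN_DEF iterator instead of walking
   DF_INSN_DEFS by hand.  */
static bool
insn_defines_hard_reg_p (rtx insn, unsigned int regno)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    if (DF_REF_REGNO (def) == regno)
      return true;
  return false;
}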