always define HAVE_cc0
gcc/ChangeLog: 2015-04-21 Trevor Saunders <tbsaunde+gcc@tbsaunde.org> * genconfig.c (main): Always define HAVE_cc0. * caller-save.c (insert_one_insn): Change ifdef HAVE_cc0 to #if HAVE_cc0. * cfgcleanup.c (flow_find_cross_jump): Likewise. (flow_find_head_matching_sequence): Likewise. (try_head_merge_bb): Likewise. * cfgrtl.c (rtl_merge_blocks): Likewise. (try_redirect_by_replacing_jump): Likewise. (rtl_tidy_fallthru_edge): Likewise. * combine.c (do_SUBST_MODE): Likewise. (insn_a_feeds_b): Likewise. (combine_instructions): Likewise. (can_combine_p): Likewise. (try_combine): Likewise. (find_split_point): Likewise. (subst): Likewise. (simplify_set): Likewise. (distribute_notes): Likewise. * cprop.c (cprop_jump): Likewise. * cse.c (cse_extended_basic_block): Likewise. * df-problems.c (can_move_insns_across): Likewise. * final.c (final): Likewise. (final_scan_insn): Likewise. * function.c (emit_use_return_register_into_block): Likewise. * gcse.c (insert_insn_end_basic_block): Likewise. * haifa-sched.c (sched_init): Likewise. * ira.c (find_moveable_pseudos): Likewise. * loop-invariant.c (find_invariant_insn): Likewise. * lra-constraints.c (curr_insn_transform): Likewise. * optabs.c (prepare_cmp_insn): Likewise. * postreload.c (reload_combine_recognize_const_pattern): Likewise. * reload.c (find_reloads): Likewise. (find_reloads_address_1): Likewise. * reorg.c (delete_scheduled_jump): Likewise. (steal_delay_list_from_target): Likewise. (steal_delay_list_from_fallthrough): Likewise. (try_merge_delay_insns): Likewise. (redundant_insn): Likewise. (fill_simple_delay_slots): Likewise. (fill_slots_from_thread): Likewise. (delete_computation): Likewise. (relax_delay_slots): Likewise. * sched-deps.c (sched_analyze_2): Likewise. * sched-rgn.c (add_branch_dependences): Likewise. From-SVN: r222296
This commit is contained in:
parent
176cb568dc
commit
f1e52ed6b2
|
@ -1,3 +1,51 @@
|
|||
2015-04-21 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
|
||||
|
||||
* genconfig.c (main): Always define HAVE_cc0.
|
||||
* caller-save.c (insert_one_insn): Change ifdef HAVE_cc0 to #if
|
||||
HAVE_cc0.
|
||||
* cfgcleanup.c (flow_find_cross_jump): Likewise.
|
||||
(flow_find_head_matching_sequence): Likewise.
|
||||
(try_head_merge_bb): Likewise.
|
||||
* cfgrtl.c (rtl_merge_blocks): Likewise.
|
||||
(try_redirect_by_replacing_jump): Likewise.
|
||||
(rtl_tidy_fallthru_edge): Likewise.
|
||||
* combine.c (do_SUBST_MODE): Likewise.
|
||||
(insn_a_feeds_b): Likewise.
|
||||
(combine_instructions): Likewise.
|
||||
(can_combine_p): Likewise.
|
||||
(try_combine): Likewise.
|
||||
(find_split_point): Likewise.
|
||||
(subst): Likewise.
|
||||
(simplify_set): Likewise.
|
||||
(distribute_notes): Likewise.
|
||||
* cprop.c (cprop_jump): Likewise.
|
||||
* cse.c (cse_extended_basic_block): Likewise.
|
||||
* df-problems.c (can_move_insns_across): Likewise.
|
||||
* final.c (final): Likewise.
|
||||
(final_scan_insn): Likewise.
|
||||
* function.c (emit_use_return_register_into_block): Likewise.
|
||||
* gcse.c (insert_insn_end_basic_block): Likewise.
|
||||
* haifa-sched.c (sched_init): Likewise.
|
||||
* ira.c (find_moveable_pseudos): Likewise.
|
||||
* loop-invariant.c (find_invariant_insn): Likewise.
|
||||
* lra-constraints.c (curr_insn_transform): Likewise.
|
||||
* optabs.c (prepare_cmp_insn): Likewise.
|
||||
* postreload.c (reload_combine_recognize_const_pattern):
|
||||
Likewise.
|
||||
* reload.c (find_reloads): Likewise.
|
||||
(find_reloads_address_1): Likewise.
|
||||
* reorg.c (delete_scheduled_jump): Likewise.
|
||||
(steal_delay_list_from_target): Likewise.
|
||||
(steal_delay_list_from_fallthrough): Likewise.
|
||||
(try_merge_delay_insns): Likewise.
|
||||
(redundant_insn): Likewise.
|
||||
(fill_simple_delay_slots): Likewise.
|
||||
(fill_slots_from_thread): Likewise.
|
||||
(delete_computation): Likewise.
|
||||
(relax_delay_slots): Likewise.
|
||||
* sched-deps.c (sched_analyze_2): Likewise.
|
||||
* sched-rgn.c (add_branch_dependences): Likewise.
|
||||
|
||||
2015-04-21 Trevor Saunders <tbsaunde+gcc@tbsaunde.org>
|
||||
|
||||
* combine.c (find_single_use): Remove HAVE_cc0 ifdef for code
|
||||
|
|
|
@ -1400,7 +1400,7 @@ insert_one_insn (struct insn_chain *chain, int before_p, int code, rtx pat)
|
|||
rtx_insn *insn = chain->insn;
|
||||
struct insn_chain *new_chain;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* If INSN references CC0, put our insns in front of the insn that sets
|
||||
CC0. This is always safe, since the only way we could be passed an
|
||||
insn that references CC0 is for a restore, and doing a restore earlier
|
||||
|
|
|
@ -1456,7 +1456,7 @@ flow_find_cross_jump (basic_block bb1, basic_block bb2, rtx_insn **f1,
|
|||
i2 = PREV_INSN (i2);
|
||||
}
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* Don't allow the insn after a compare to be shared by
|
||||
cross-jumping unless the compare is also shared. */
|
||||
if (ninsns && reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
|
||||
|
@ -1579,7 +1579,7 @@ flow_find_head_matching_sequence (basic_block bb1, basic_block bb2, rtx_insn **f
|
|||
i2 = NEXT_INSN (i2);
|
||||
}
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* Don't allow a compare to be shared by cross-jumping unless the insn
|
||||
after the compare is also shared. */
|
||||
if (ninsns && reg_mentioned_p (cc0_rtx, last1) && sets_cc0_p (last1))
|
||||
|
@ -2370,7 +2370,7 @@ try_head_merge_bb (basic_block bb)
|
|||
cond = get_condition (jump, &move_before, true, false);
|
||||
if (cond == NULL_RTX)
|
||||
{
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (reg_mentioned_p (cc0_rtx, jump))
|
||||
move_before = prev_nonnote_nondebug_insn (jump);
|
||||
else
|
||||
|
@ -2539,7 +2539,7 @@ try_head_merge_bb (basic_block bb)
|
|||
cond = get_condition (jump, &move_before, true, false);
|
||||
if (cond == NULL_RTX)
|
||||
{
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (reg_mentioned_p (cc0_rtx, jump))
|
||||
move_before = prev_nonnote_nondebug_insn (jump);
|
||||
else
|
||||
|
@ -2562,7 +2562,7 @@ try_head_merge_bb (basic_block bb)
|
|||
/* Try again, using a different insertion point. */
|
||||
move_before = jump;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* Don't try moving before a cc0 user, as that may invalidate
|
||||
the cc0. */
|
||||
if (reg_mentioned_p (cc0_rtx, jump))
|
||||
|
@ -2622,7 +2622,7 @@ try_head_merge_bb (basic_block bb)
|
|||
/* For the unmerged insns, try a different insertion point. */
|
||||
move_before = jump;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* Don't try moving before a cc0 user, as that may invalidate
|
||||
the cc0. */
|
||||
if (reg_mentioned_p (cc0_rtx, jump))
|
||||
|
|
|
@ -893,7 +893,7 @@ rtl_merge_blocks (basic_block a, basic_block b)
|
|||
|
||||
del_first = a_end;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* If this was a conditional jump, we need to also delete
|
||||
the insn that set cc0. */
|
||||
if (only_sets_cc0_p (prev))
|
||||
|
@ -1064,7 +1064,7 @@ try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
|
|||
/* In case we zap a conditional jump, we'll need to kill
|
||||
the cc0 setter too. */
|
||||
kill_from = insn;
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
|
||||
&& only_sets_cc0_p (PREV_INSN (insn)))
|
||||
kill_from = PREV_INSN (insn);
|
||||
|
@ -1825,7 +1825,7 @@ rtl_tidy_fallthru_edge (edge e)
|
|||
delete_insn (table);
|
||||
}
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* If this was a conditional jump, we need to also delete
|
||||
the insn that set cc0. */
|
||||
if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
|
||||
|
|
|
@ -836,7 +836,7 @@ do_SUBST_MODE (rtx *into, machine_mode newval)
|
|||
|
||||
#define SUBST_MODE(INTO, NEWVAL) do_SUBST_MODE (&(INTO), (NEWVAL))
|
||||
|
||||
#ifndef HAVE_cc0
|
||||
#if !HAVE_cc0
|
||||
/* Similar to SUBST, but NEWVAL is a LOG_LINKS expression. */
|
||||
|
||||
static void
|
||||
|
@ -1141,7 +1141,7 @@ insn_a_feeds_b (rtx_insn *a, rtx_insn *b)
|
|||
FOR_EACH_LOG_LINK (links, b)
|
||||
if (links->insn == a)
|
||||
return true;
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (sets_cc0_p (a))
|
||||
return true;
|
||||
#endif
|
||||
|
@ -1157,7 +1157,7 @@ static int
|
|||
combine_instructions (rtx_insn *f, unsigned int nregs)
|
||||
{
|
||||
rtx_insn *insn, *next;
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
rtx_insn *prev;
|
||||
#endif
|
||||
struct insn_link *links, *nextlinks;
|
||||
|
@ -1334,7 +1334,7 @@ combine_instructions (rtx_insn *f, unsigned int nregs)
|
|||
}
|
||||
}
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* Try to combine a jump insn that uses CC0
|
||||
with a preceding insn that sets CC0, and maybe with its
|
||||
logical predecessor as well.
|
||||
|
@ -2068,7 +2068,7 @@ can_combine_p (rtx_insn *insn, rtx_insn *i3, rtx_insn *pred ATTRIBUTE_UNUSED,
|
|||
return 0;
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* Don't combine an insn that follows a CC0-setting insn.
|
||||
An insn that uses CC0 must not be separated from the one that sets it.
|
||||
We do, however, allow I2 to follow a CC0-setting insn if that insn
|
||||
|
@ -2514,7 +2514,7 @@ is_parallel_of_n_reg_sets (rtx pat, int n)
|
|||
return true;
|
||||
}
|
||||
|
||||
#ifndef HAVE_cc0
|
||||
#if !HAVE_cc0
|
||||
/* Return whether INSN, a PARALLEL of N register SETs (and maybe some
|
||||
CLOBBERs), can be split into individual SETs in that order, without
|
||||
changing semantics. */
|
||||
|
@ -2888,7 +2888,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
|
|||
}
|
||||
}
|
||||
|
||||
#ifndef HAVE_cc0
|
||||
#if !HAVE_cc0
|
||||
/* If we have no I1 and I2 looks like:
|
||||
(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
|
||||
(set Y OP)])
|
||||
|
@ -3116,7 +3116,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
|
|||
|
||||
subst_insn = i3;
|
||||
|
||||
#ifndef HAVE_cc0
|
||||
#if !HAVE_cc0
|
||||
/* Many machines that don't use CC0 have insns that can both perform an
|
||||
arithmetic operation and set the condition code. These operations will
|
||||
be represented as a PARALLEL with the first element of the vector
|
||||
|
@ -3646,7 +3646,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
|
|||
are set between I2 and I3. */
|
||||
if (insn_code_number < 0
|
||||
&& (split = find_split_point (&newpat, i3, false)) != 0
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& REG_P (i2dest)
|
||||
#endif
|
||||
/* We need I2DEST in the proper mode. If it is a hard register
|
||||
|
@ -3918,7 +3918,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
|
|||
&& !(GET_CODE (SET_DEST (set1)) == SUBREG
|
||||
&& find_reg_note (i2, REG_DEAD,
|
||||
SUBREG_REG (SET_DEST (set1))))
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& !reg_referenced_p (cc0_rtx, set0)
|
||||
#endif
|
||||
/* If I3 is a jump, ensure that set0 is a jump so that
|
||||
|
@ -3935,7 +3935,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
|
|||
&& !(GET_CODE (SET_DEST (set0)) == SUBREG
|
||||
&& find_reg_note (i2, REG_DEAD,
|
||||
SUBREG_REG (SET_DEST (set0))))
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& !reg_referenced_p (cc0_rtx, set1)
|
||||
#endif
|
||||
/* If I3 is a jump, ensure that set1 is a jump so that
|
||||
|
@ -4002,7 +4002,7 @@ try_combine (rtx_insn *i3, rtx_insn *i2, rtx_insn *i1, rtx_insn *i0,
|
|||
}
|
||||
}
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* If I2 is the CC0 setter and I3 is the CC0 user then check whether
|
||||
they are adjacent to each other or not. */
|
||||
{
|
||||
|
@ -4816,7 +4816,7 @@ find_split_point (rtx *loc, rtx_insn *insn, bool set_src)
|
|||
break;
|
||||
|
||||
case SET:
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
|
||||
ZERO_EXTRACT, the most likely reason why this doesn't match is that
|
||||
we need to put the operand into a register. So split at that
|
||||
|
@ -5331,7 +5331,7 @@ subst (rtx x, rtx from, rtx to, int in_dest, int in_cond, int unique_copy)
|
|||
&& ! (code == SUBREG
|
||||
&& MODES_TIEABLE_P (GET_MODE (x),
|
||||
GET_MODE (SUBREG_REG (to))))
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
|
||||
#endif
|
||||
)
|
||||
|
@ -6582,7 +6582,7 @@ simplify_set (rtx x)
|
|||
else
|
||||
compare_mode = SELECT_CC_MODE (new_code, op0, op1);
|
||||
|
||||
#ifndef HAVE_cc0
|
||||
#if !HAVE_cc0
|
||||
/* If the mode changed, we have to change SET_DEST, the mode in the
|
||||
compare, and the mode in the place SET_DEST is used. If SET_DEST is
|
||||
a hard register, just build new versions with the proper mode. If it
|
||||
|
@ -13802,7 +13802,7 @@ distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2,
|
|||
{
|
||||
rtx set = single_set (tem_insn);
|
||||
rtx inner_dest = 0;
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
rtx_insn *cc0_setter = NULL;
|
||||
#endif
|
||||
|
||||
|
@ -13824,7 +13824,7 @@ distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2,
|
|||
|
||||
if (set != 0 && ! side_effects_p (SET_SRC (set))
|
||||
&& rtx_equal_p (XEXP (note, 0), inner_dest)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
|
||||
|| ((cc0_setter = prev_cc0_setter (tem_insn)) != NULL
|
||||
&& sets_cc0_p (PATTERN (cc0_setter)) > 0))
|
||||
|
@ -13848,7 +13848,7 @@ distribute_notes (rtx notes, rtx_insn *from_insn, rtx_insn *i3, rtx_insn *i2,
|
|||
if (tem_insn == i2)
|
||||
i2 = NULL;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* Delete the setter too. */
|
||||
if (cc0_setter)
|
||||
{
|
||||
|
|
|
@ -965,7 +965,7 @@ cprop_jump (basic_block bb, rtx_insn *setcc, rtx_insn *jump, rtx from, rtx src)
|
|||
remove_note (jump, note);
|
||||
}
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* Delete the cc0 setter. */
|
||||
if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
|
||||
delete_insn (setcc);
|
||||
|
|
|
@ -6524,7 +6524,7 @@ cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
|
|||
&& check_for_label_ref (insn))
|
||||
recorded_label_ref = true;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (NONDEBUG_INSN_P (insn))
|
||||
{
|
||||
/* If the previous insn sets CC0 and this insn no
|
||||
|
|
|
@ -3820,7 +3820,7 @@ can_move_insns_across (rtx_insn *from, rtx_insn *to,
|
|||
if (bitmap_intersect_p (merge_set, test_use)
|
||||
|| bitmap_intersect_p (merge_use, test_set))
|
||||
break;
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (!sets_cc0_p (insn))
|
||||
#endif
|
||||
max_to = insn;
|
||||
|
@ -3861,7 +3861,7 @@ can_move_insns_across (rtx_insn *from, rtx_insn *to,
|
|||
if (NONDEBUG_INSN_P (insn))
|
||||
{
|
||||
if (!bitmap_intersect_p (test_set, local_merge_live)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& !sets_cc0_p (insn)
|
||||
#endif
|
||||
)
|
||||
|
|
14
gcc/final.c
14
gcc/final.c
|
@ -242,7 +242,7 @@ static void output_asm_operand_names (rtx *, int *, int);
|
|||
#ifdef LEAF_REGISTERS
|
||||
static void leaf_renumber_regs (rtx_insn *);
|
||||
#endif
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
static int alter_cond (rtx);
|
||||
#endif
|
||||
#ifndef ADDR_VEC_ALIGN
|
||||
|
@ -2029,7 +2029,7 @@ final (rtx_insn *first, FILE *file, int optimize_p)
|
|||
|
||||
last_ignored_compare = 0;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
for (insn = first; insn; insn = NEXT_INSN (insn))
|
||||
{
|
||||
/* If CC tracking across branches is enabled, record the insn which
|
||||
|
@ -2198,7 +2198,7 @@ rtx_insn *
|
|||
final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
|
||||
int nopeepholes ATTRIBUTE_UNUSED, int *seen)
|
||||
{
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
rtx set;
|
||||
#endif
|
||||
rtx_insn *next;
|
||||
|
@ -2505,7 +2505,7 @@ final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
|
|||
|| GET_CODE (body) == CLOBBER)
|
||||
break;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
{
|
||||
/* If there is a REG_CC_SETTER note on this insn, it means that
|
||||
the setting of the condition code was done in the delay slot
|
||||
|
@ -2722,7 +2722,7 @@ final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
|
|||
|
||||
body = PATTERN (insn);
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
set = single_set (insn);
|
||||
|
||||
/* Check for redundant test and compare instructions
|
||||
|
@ -2967,7 +2967,7 @@ final_scan_insn (rtx_insn *insn, FILE *file, int optimize_p ATTRIBUTE_UNUSED,
|
|||
&& GET_CODE (PATTERN (insn)) == COND_EXEC)
|
||||
current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
cc_prev_status = cc_status;
|
||||
|
||||
/* Update `cc_status' for this instruction.
|
||||
|
@ -3256,7 +3256,7 @@ walk_alter_subreg (rtx *xp, bool *changed)
|
|||
return *xp;
|
||||
}
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
|
||||
/* Given BODY, the body of a jump instruction, alter the jump condition
|
||||
as required by the bits that are set in cc_status.flags.
|
||||
|
|
|
@ -5661,7 +5661,7 @@ emit_use_return_register_into_block (basic_block bb)
|
|||
seq = get_insns ();
|
||||
end_sequence ();
|
||||
insn = BB_END (bb);
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
|
||||
insn = prev_cc0_setter (insn);
|
||||
#endif
|
||||
|
|
|
@ -2048,7 +2048,7 @@ insert_insn_end_basic_block (struct gcse_expr *expr, basic_block bb)
|
|||
&& (!single_succ_p (bb)
|
||||
|| single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
|
||||
{
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
|
||||
if cc0 isn't set. */
|
||||
rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
|
||||
|
|
|
@ -346,6 +346,7 @@ main (int argc, char **argv)
|
|||
{
|
||||
/* We output CC0_P this way to make sure that X is declared
|
||||
somewhere. */
|
||||
printf ("#define HAVE_cc0 0\n");
|
||||
printf ("#define CC0_P(X) ((X) ? 0 : 0)\n");
|
||||
}
|
||||
|
||||
|
|
|
@ -7184,7 +7184,7 @@ void
|
|||
sched_init (void)
|
||||
{
|
||||
/* Disable speculative loads in their presence if cc0 defined. */
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
flag_schedule_speculative_load = 0;
|
||||
#endif
|
||||
|
||||
|
|
|
@ -4641,7 +4641,7 @@ find_moveable_pseudos (void)
|
|||
? " (no unique first use)" : "");
|
||||
continue;
|
||||
}
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (reg_referenced_p (cc0_rtx, PATTERN (closest_use)))
|
||||
{
|
||||
if (dump_file)
|
||||
|
@ -4724,7 +4724,7 @@ find_moveable_pseudos (void)
|
|||
{
|
||||
if (bitmap_bit_p (def_bb_moveable, regno)
|
||||
&& !control_flow_insn_p (use_insn)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& !sets_cc0_p (use_insn)
|
||||
#endif
|
||||
)
|
||||
|
|
|
@ -923,7 +923,7 @@ find_invariant_insn (rtx_insn *insn, bool always_reached, bool always_executed)
|
|||
bool simple = true;
|
||||
struct invariant *inv;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* We can't move a CC0 setter without the user. */
|
||||
if (sets_cc0_p (insn))
|
||||
return;
|
||||
|
|
|
@ -3354,7 +3354,7 @@ curr_insn_transform (bool check_only_p)
|
|||
if (JUMP_P (curr_insn) || CALL_P (curr_insn))
|
||||
no_output_reloads_p = true;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (reg_referenced_p (cc0_rtx, PATTERN (curr_insn)))
|
||||
no_input_reloads_p = true;
|
||||
if (reg_set_p (cc0_rtx, PATTERN (curr_insn)))
|
||||
|
|
|
@ -4088,7 +4088,7 @@ prepare_cmp_insn (rtx x, rtx y, enum rtx_code comparison, rtx size,
|
|||
> COSTS_N_INSNS (1)))
|
||||
y = force_reg (mode, y);
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* Make sure if we have a canonical comparison. The RTL
|
||||
documentation states that canonical comparisons are required only
|
||||
for targets which have cc0. */
|
||||
|
|
|
@ -1032,7 +1032,7 @@ reload_combine_recognize_const_pattern (rtx_insn *insn)
|
|||
&& reg_state[clobbered_regno].real_store_ruid >= use_ruid)
|
||||
break;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets. */
|
||||
if (must_move_add && sets_cc0_p (PATTERN (use_insn)))
|
||||
break;
|
||||
|
|
|
@ -2706,7 +2706,7 @@ find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
|
|||
if (JUMP_P (insn) || CALL_P (insn))
|
||||
no_output_reloads = 1;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
|
||||
no_input_reloads = 1;
|
||||
if (reg_set_p (cc0_rtx, PATTERN (insn)))
|
||||
|
@ -4579,7 +4579,7 @@ find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
|
|||
rld[j].in = 0;
|
||||
}
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* If we made any reloads for addresses, see if they violate a
|
||||
"no input reloads" requirement for this insn. But loads that we
|
||||
do after the insn (such as for output addresses) are fine. */
|
||||
|
@ -5873,7 +5873,7 @@ find_reloads_address_1 (machine_mode mode, addr_space_t as,
|
|||
enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
|
||||
if (insn && NONJUMP_INSN_P (insn) && equiv
|
||||
&& memory_operand (equiv, GET_MODE (equiv))
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& ! sets_cc0_p (PATTERN (insn))
|
||||
#endif
|
||||
&& ! (icode != CODE_FOR_nothing
|
||||
|
|
30
gcc/reorg.c
30
gcc/reorg.c
|
@ -182,7 +182,7 @@ skip_consecutive_labels (rtx label_or_return)
|
|||
return label;
|
||||
}
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
|
||||
and REG_CC_USER notes so we can find it. */
|
||||
|
||||
|
@ -699,7 +699,7 @@ delete_scheduled_jump (rtx_insn *insn)
|
|||
be other insns that became dead anyway, which we wouldn't know to
|
||||
delete. */
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (reg_mentioned_p (cc0_rtx, insn))
|
||||
{
|
||||
rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
|
||||
|
@ -1171,7 +1171,7 @@ steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
|
|||
if (insn_references_resource_p (trial, sets, false)
|
||||
|| insn_sets_resource_p (trial, needed, false)
|
||||
|| insn_sets_resource_p (trial, sets, false)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* If TRIAL sets CC0, we can't copy it, so we can't steal this
|
||||
delay list. */
|
||||
|| find_reg_note (trial, REG_CC_USER, NULL_RTX)
|
||||
|
@ -1279,7 +1279,7 @@ steal_delay_list_from_fallthrough (rtx_insn *insn, rtx condition,
|
|||
if (insn_references_resource_p (trial, sets, false)
|
||||
|| insn_sets_resource_p (trial, needed, false)
|
||||
|| insn_sets_resource_p (trial, sets, false)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
|| sets_cc0_p (PATTERN (trial))
|
||||
#endif
|
||||
)
|
||||
|
@ -1373,7 +1373,7 @@ try_merge_delay_insns (rtx insn, rtx_insn *thread)
|
|||
continue;
|
||||
|
||||
if (GET_CODE (next_to_match) == GET_CODE (trial)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* We can't share an insn that sets cc0. */
|
||||
&& ! sets_cc0_p (pat)
|
||||
#endif
|
||||
|
@ -1446,7 +1446,7 @@ try_merge_delay_insns (rtx insn, rtx_insn *thread)
|
|||
if (! insn_references_resource_p (dtrial, &set, true)
|
||||
&& ! insn_sets_resource_p (dtrial, &set, true)
|
||||
&& ! insn_sets_resource_p (dtrial, &needed, true)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& ! sets_cc0_p (PATTERN (dtrial))
|
||||
#endif
|
||||
&& rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
|
||||
|
@ -1629,7 +1629,7 @@ redundant_insn (rtx insn, rtx_insn *target, rtx delay_list)
|
|||
target_main = XVECEXP (PATTERN (target), 0, 0);
|
||||
|
||||
if (resource_conflicts_p (&needed, &set)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
|| reg_mentioned_p (cc0_rtx, ipat)
|
||||
#endif
|
||||
/* The insn requiring the delay may not set anything needed or set by
|
||||
|
@ -2125,7 +2125,7 @@ fill_simple_delay_slots (int non_jumps_p)
|
|||
filter_flags ? &fset : &set,
|
||||
true)
|
||||
&& ! insn_sets_resource_p (trial, &needed, true)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* Can't separate set of cc0 from its use. */
|
||||
&& ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
|
||||
#endif
|
||||
|
@ -2260,7 +2260,7 @@ fill_simple_delay_slots (int non_jumps_p)
|
|||
&& ! insn_references_resource_p (trial, &set, true)
|
||||
&& ! insn_sets_resource_p (trial, &set, true)
|
||||
&& ! insn_sets_resource_p (trial, &needed, true)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
|
||||
#endif
|
||||
&& ! (maybe_never && may_trap_or_fault_p (pat))
|
||||
|
@ -2270,7 +2270,7 @@ fill_simple_delay_slots (int non_jumps_p)
|
|||
{
|
||||
next_trial = next_nonnote_insn (trial);
|
||||
delay_list = add_to_delay_list (trial, delay_list);
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (reg_mentioned_p (cc0_rtx, pat))
|
||||
link_cc0_insns (trial);
|
||||
#endif
|
||||
|
@ -2309,7 +2309,7 @@ fill_simple_delay_slots (int non_jumps_p)
|
|||
&& ! insn_references_resource_p (next_trial, &set, true)
|
||||
&& ! insn_sets_resource_p (next_trial, &set, true)
|
||||
&& ! insn_sets_resource_p (next_trial, &needed, true)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
|
||||
#endif
|
||||
&& ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
|
||||
|
@ -2522,7 +2522,7 @@ fill_slots_from_thread (rtx_insn *insn, rtx condition, rtx thread_or_return,
|
|||
if (! insn_references_resource_p (trial, &set, true)
|
||||
&& ! insn_sets_resource_p (trial, &set, true)
|
||||
&& ! insn_sets_resource_p (trial, &needed, true)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
&& ! (reg_mentioned_p (cc0_rtx, pat)
|
||||
&& (! own_thread || ! sets_cc0_p (pat)))
|
||||
#endif
|
||||
|
@ -2605,7 +2605,7 @@ fill_slots_from_thread (rtx_insn *insn, rtx condition, rtx thread_or_return,
|
|||
must_annul = 1;
|
||||
winner:
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (reg_mentioned_p (cc0_rtx, pat))
|
||||
link_cc0_insns (trial);
|
||||
#endif
|
||||
|
@ -3161,7 +3161,7 @@ delete_computation (rtx insn)
|
|||
{
|
||||
rtx note, next;
|
||||
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
|
||||
{
|
||||
rtx prev = prev_nonnote_insn (insn);
|
||||
|
@ -3498,7 +3498,7 @@ relax_delay_slots (rtx_insn *first)
|
|||
&& ! condjump_in_parallel_p (delay_insn)
|
||||
&& prev_active_insn (target_label) == insn
|
||||
&& ! BARRIER_P (prev_nonnote_insn (target_label))
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
/* If the last insn in the delay slot sets CC0 for some insn,
|
||||
various code assumes that it is in a delay slot. We could
|
||||
put it back where it belonged and delete the register notes,
|
||||
|
|
|
@ -2609,7 +2609,7 @@ sched_analyze_2 (struct deps_desc *deps, rtx x, rtx_insn *insn)
|
|||
return;
|
||||
|
||||
case CC0:
|
||||
#ifndef HAVE_cc0
|
||||
#if !HAVE_cc0
|
||||
gcc_unreachable ();
|
||||
#endif
|
||||
/* User of CC0 depends on immediately preceding insn. */
|
||||
|
|
|
@ -2487,7 +2487,7 @@ add_branch_dependences (rtx_insn *head, rtx_insn *tail)
|
|||
&& (GET_CODE (PATTERN (insn)) == USE
|
||||
|| GET_CODE (PATTERN (insn)) == CLOBBER
|
||||
|| can_throw_internal (insn)
|
||||
#ifdef HAVE_cc0
|
||||
#if HAVE_cc0
|
||||
|| sets_cc0_p (PATTERN (insn))
|
||||
#endif
|
||||
|| (!reload_completed
|
||||
|
|
Loading…
Reference in New Issue