alias.c (get_alias_set, [...]): Use MEM_P.
2004-07-01  Jerry Quinn  <jlquinn@optonline.net>

	* alias.c (get_alias_set, canon_rtx, get_addr,
	nonoverlapping_memrefs_p, nonlocal_referenced_p_1, memory_modified_1):
	Use MEM_P.
	* builtins.c (expand_builtin_prefetch, expand_builtin_profile_func,
	expand_builtin): Likewise.
	* calls.c (expand_call, emit_library_call_value_1, store_one_arg):
	Likewise.
	* combine.c (can_combine_p, combinable_i3pat, try_combine,
	find_split_point, combine_simplify_rtx, simplify_set, make_extraction,
	rtx_equal_for_field_assignment_p, gen_lowpart_for_combine,
	record_dead_and_set_regs_1, get_last_value_validate,
	mark_used_regs_combine, move_deaths, unmentioned_reg_p_1): Likewise.
	* cse.c (check_dependence, canon_hash, equiv_constant,
	gen_lowpart_if_possible, cse_insn, invalidate_from_clobbers,
	cse_around_loop, cse_check_loop_start, cse_set_around_loop,
	count_reg_usage): Likewise.
	* cselib.c (rtx_equal_for_cselib_p, add_mem_for_addr, cselib_lookup,
	cselib_invalidate_mem, cselib_invalidate_rtx, cselib_record_set,
	cselib_record_sets): Likewise.
	* dbxout.c (PARM_PASSED_IN_MEMORY, dbxout_symbol,
	dbxout_symbol_location, dbxout_parms, dbxout_reg_parms): Likewise.
	* ddg.c (mark_mem_use, mark_mem_store, rtx_mem_access_p): Likewise.
	* df.c (df_uses_record): Likewise.
	* dojump.c (do_jump): Likewise.
	* dwarf2out.c (stack_adjust_offset, mem_loc_descriptor,
	loc_descriptor_from_tree, rtl_for_decl_location, add_bound_info,
	decl_start_label): Likewise.
	* emit-rtl.c (gen_complex_constant_part, gen_highpart,
	operand_subword, change_address_1, make_safe_from): Likewise.
	* explow.c (break_out_memory_refs, copy_all_regs, validize_mem,
	stabilize, force_not_mem): Likewise.
	* expmed.c (store_bit_field, store_split_bit_field, extract_bit_field,
	expand_mult_const, expand_divmod, emit_store_flag): Likewise.
	* expr.c (convert_move, convert_modes, emit_block_move,
	emit_group_load, emit_group_store, clear_storage, emit_move_insn,
	emit_move_insn_1, expand_assignment, store_expr,
	store_constructor_field, store_constructor, store_field,
	force_operand, safe_from_p, expand_expr_real_1, expand_increment):
	Likewise.
	* final.c (cleanup_subreg_operands, alter_subreg,
	get_mem_expr_from_op): Likewise.
	* flow.c (notice_stack_pointer_modification_1,
	init_propagate_block_info, insn_dead_p, mark_set_1, mark_used_regs):
	Likewise.
	* function.c (mark_temp_addr_taken, preserve_temp_slots,
	preserve_rtl_expr_result, put_var_into_stack, fixup_var_refs_1,
	optimize_bit_field, flush_addressof, purge_addressof_1,
	instantiate_decl, instantiate_virtual_regs_1, assign_parms,
	setjmp_protect, setjmp_protect_args, fix_lexical_addr,
	keep_stack_depressed): Likewise.
	* ifcvt.c (noce_try_cmove_arith, noce_try_abs, noce_operand_ok,
	noce_process_if_block, find_memory): Likewise.
	* integrate.c (subst_constants, allocate_initial_values): Likewise.
	* local-alloc.c (validate_equiv_mem_from_store, memref_referenced_p,
	update_equiv_regs): Likewise.
	* loop.c (scan_loop, prescan_loop, note_addr_stored, check_store,
	maybe_eliminate_biv_1, find_mem_in_note_1): Likewise.
	* optabs.c (expand_abs, emit_unop_insn): Likewise.
	* passes.c (rest_of_handle_final): Likewise.
	* postreload.c (reload_cse_simplify_set, reload_cse_simplify_operands,
	move2add_note_store): Likewise.
	* ra-build.c (detect_remat_webs): Likewise.
	* ra-debug.c (dump_static_insn_cost): Likewise.
	* ra-rewrite.c (slots_overlap_p, insert_stores): Likewise.
	* recog.c (validate_change, apply_change_group, cancel_changes,
	validate_replace_rtx_1, general_operand, register_operand,
	nonmemory_operand, push_operand, pop_operand, memory_operand,
	indirect_operand, asm_operand_ok, offsettable_memref_p,
	offsettable_nonstrict_memref_p, constrain_operands,
	store_data_bypass_p): Likewise.
	* reg-stack.c (subst_stack_regs_pat): Likewise.
	* regclass.c (record_operand_costs, scan_one_insn, record_reg_classes,
	copy_cost, reg_scan_mark_refs): Likewise.
	* regmove.c (optimize_reg_copy_3, stack_memref_p,
	combine_stack_adjustments_for_block): Likewise.
	* regrename.c (copyprop_hardreg_forward_1): Likewise.
	* reload.c (can_reload_into, push_reload, decompose, immune_p,
	find_reloads, find_reloads_address, find_reloads_address_1,
	reg_overlap_mentioned_for_reload_p, refers_to_mem_for_reload_p,
	find_equiv_reg): Likewise.
	* reload1.c (reload, eliminate_regs, eliminate_regs_in_insn,
	reload_as_needed, choose_reload_regs, emit_input_reload_insns,
	do_input_reload, emit_reload_insns, gen_reload, delete_output_reload,
	delete_address_reloads): Likewise.
	* resource.c (mark_referenced_resources): Likewise.
	* rtlanal.c (get_jump_table_offset, count_occurrences,
	reg_referenced_p, reg_set_p, set_of_1, set_noop_p,
	reg_overlap_mentioned_p, note_uses, replace_regs, nonzero_bits1,
	num_sign_bit_copies1): Likewise.
	* rtlhooks.c (gen_lowpart_general): Likewise.
	* sched-deps.c (sched_analyze_1, sched_analyze_2): Likewise.
	* sdbout.c (PARM_PASSED_IN_MEMORY, sdbout_symbol,
	sdbout_toplevel_data, sdbout_parms, sdbout_reg_parms,
	sdbout_global_decl): Likewise.
	* simplify-rtx.c (simplify_subreg): Likewise.
	* stmt.c (expand_asm_operands, expand_expr_stmt_value, expand_decl,
	expand_anon_union_decl, expand_end_case_type): Likewise.
	* unroll.c (calculate_giv_inc): Likewise.
	* var-tracking.c (stack_adjust_offset_pre_post,
	bb_stack_adjust_offset, track_expr_p, count_uses, add_uses,
	add_stores, compute_bb_dataflow, vt_get_decl_and_offset,
	vt_add_function_parameters): Likewise.
	* varasm.c (make_var_volatile, notice_global_symbol,
	assemble_external, decode_addr_const, mark_weak,
	default_encode_section_info): Likewise.

From-SVN: r83980
parent 8436e65ac6
commit 3c0cb5de6a
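The whole patch is one mechanical substitution: every open-coded test of the
form GET_CODE (x) == MEM (or != MEM) becomes a call to the MEM_P predicate
macro.  A minimal sketch of that idea, assuming the predicate is the usual
one-line wrapper in gcc/rtl.h (the parameter name RTX and the two helper
functions below are illustrative assumptions, not quoted from the tree):

    /* Presumed shape of the predicate in gcc/rtl.h of this era.  */
    #define MEM_P(RTX)  (GET_CODE (RTX) == MEM)

    /* Hypothetical helpers showing the before/after shape of each site.  */
    static int
    refers_to_memory_old (rtx x)
    {
      return GET_CODE (x) == MEM;   /* before: compare the RTX code directly */
    }

    static int
    refers_to_memory_new (rtx x)
    {
      return MEM_P (x);             /* after: use the predicate macro */
    }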
gcc/ChangeLog | 108
@@ -1,3 +1,111 @@
+	[The 2004-07-01 Jerry Quinn entry quoted in the commit message above
+	is added here, verbatim.]
 2004-07-01  Steven Bosscher  <stevenb@suse.de>

 	* stmt.c (check_seenlabel): Remove.

gcc/alias.c | 22

@@ -566,7 +566,7 @@ get_alias_set (tree t)
- && DECL_RTL_SET_P (t) && GET_CODE (DECL_RTL (t)) == MEM)
+ && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
@@ -1197,7 +1197,7 @@ canon_rtx (rtx x)
- else if (GET_CODE (x) == MEM)
+ else if (MEM_P (x))
@@ -1608,7 +1608,7 @@ get_addr (rtx x)
- if (!REG_P (l->loc) && GET_CODE (l->loc) != MEM)
+ if (!REG_P (l->loc) && !MEM_P (l->loc))
@@ -2110,7 +2110,7 @@ nonoverlapping_memrefs_p (rtx x, rtx y)
- if ((GET_CODE (rtlx) != MEM || GET_CODE (rtly) != MEM)
+ if ((!MEM_P (rtlx) || !MEM_P (rtly))
@@ -2118,11 +2118,11 @@ nonoverlapping_memrefs_p (rtx x, rtx y)
- basex = GET_CODE (rtlx) == MEM ? XEXP (rtlx, 0) : rtlx;
+ basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
- basey = GET_CODE (rtly) == MEM ? XEXP (rtly, 0) : rtly;
+ basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
@@ -2137,10 +2137,10 @@ nonoverlapping_memrefs_p (rtx x, rtx y)
- sizex = (GET_CODE (rtlx) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
+ sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
- sizey = (GET_CODE (rtly) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtly))
+ sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
@@ -2557,7 +2557,7 @@ nonlocal_referenced_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
- if (GET_CODE (SET_DEST (x)) == MEM)
+ if (MEM_P (SET_DEST (x)))
@@ -2577,7 +2577,7 @@ nonlocal_referenced_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
- if (GET_CODE (XEXP (x, 0)) == MEM)
+ if (MEM_P (XEXP (x, 0)))
@@ -2789,7 +2789,7 @@ static bool memory_modified;
- if (GET_CODE (x) == MEM)
+ if (MEM_P (x))

gcc/builtins.c

@@ -965,7 +965,7 @@ expand_builtin_prefetch (tree arglist)
- if (GET_CODE (op0) != MEM && side_effects_p (op0))
+ if (!MEM_P (op0) && side_effects_p (op0))
@@ -5329,7 +5329,7 @@ expand_builtin_profile_func (bool exitp)
- if (GET_CODE (this) == MEM)
+ if (MEM_P (this))
@@ -5836,7 +5836,7 @@ expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
- || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
+ || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))

gcc/calls.c | 16

@@ -2208,7 +2208,7 @@ expand_call (tree exp, rtx target, int ignore)
- else if (target && GET_CODE (target) == MEM)
+ else if (target && MEM_P (target))
@@ -3101,7 +3101,7 @@ expand_call (tree exp, rtx target, int ignore)
- if (target == 0 || GET_CODE (target) != MEM)
+ if (target == 0 || !MEM_P (target))
@@ -3156,7 +3156,7 @@ expand_call (tree exp, rtx target, int ignore)
- if (GET_CODE (target) == MEM)
+ if (MEM_P (target))
@@ -3281,7 +3281,7 @@ expand_call (tree exp, rtx target, int ignore)
- && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
+ && MEM_P (target) && RTX_UNCHANGING_P (target))
@@ -3609,7 +3609,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
- if (value != 0 && GET_CODE (value) == MEM)
+ if (value != 0 && MEM_P (value))
@@ -3659,7 +3659,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
- if (!REG_P (addr) && GET_CODE (addr) != MEM
+ if (!REG_P (addr) && !MEM_P (addr)
@@ -3705,7 +3705,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
- if (!REG_P (val) && GET_CODE (val) != MEM
+ if (!REG_P (val) && !MEM_P (val)
@@ -4530,7 +4530,7 @@ store_one_arg (struct arg_data *arg, rtx argblock, int flags,
- if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
+ if ((flags & ECF_SIBCALL) && MEM_P (arg->value))

gcc/combine.c

@@ -1234,7 +1234,7 @@ can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
- && (((GET_CODE (src) != MEM
+ && (((!MEM_P (src)
@@ -1433,7 +1433,7 @@ combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
- (GET_CODE (inner_dest) != MEM
+ (!MEM_P (inner_dest)
@@ -1914,7 +1914,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
- && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
+ && MEM_P (SET_DEST (PATTERN (i3)))
@@ -2414,7 +2414,7 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
- if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
+ if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
@@ -3046,7 +3046,7 @@ find_split_point (rtx *loc, rtx insn)
- if (GET_CODE (SUBREG_REG (x)) == MEM)
+ if (MEM_P (SUBREG_REG (x)))
@@ -3995,7 +3995,7 @@ combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
- if (GET_CODE (SUBREG_REG (x)) == MEM
+ if (MEM_P (SUBREG_REG (x))
@@ -5358,7 +5358,7 @@ simplify_set (rtx x)
- && GET_CODE (SUBREG_REG (src)) == MEM)
+ && MEM_P (SUBREG_REG (src)))
@@ -6139,7 +6139,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
- if (GET_CODE (SUBREG_REG (inner)) == MEM)
+ if (MEM_P (SUBREG_REG (inner)))
@@ -6180,11 +6180,11 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
- && GET_CODE (inner) != MEM
+ && !MEM_P (inner)
- || (GET_CODE (inner) == MEM && pos_rtx == 0
+ || (MEM_P (inner) && pos_rtx == 0
@@ -6202,7 +6202,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
- if (GET_CODE (inner) == MEM)
+ if (MEM_P (inner))
@@ -6261,7 +6261,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
- return (GET_CODE (new) == MEM ? new
+ return (MEM_P (new) ? new
@@ -6312,7 +6312,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
- if (! spans_byte && GET_CODE (inner) == MEM
+ if (! spans_byte && MEM_P (inner)
@@ -6355,7 +6355,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
- if (GET_CODE (inner) != MEM)
+ if (!MEM_P (inner))
@@ -6373,7 +6373,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
- int width = (GET_CODE (inner) == MEM
+ int width = (MEM_P (inner)
@@ -6383,7 +6383,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
- Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
+ Note that it can only be less than 0 if !MEM_P (inner).  */
@@ -6391,7 +6391,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
- && ((GET_CODE (inner) == MEM
+ && ((MEM_P (inner)
@@ -6429,7 +6429,7 @@ make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
- else if (GET_CODE (inner) != MEM)
+ else if (!MEM_P (inner))
@@ -7771,14 +7771,14 @@ rtx_equal_for_field_assignment_p (rtx x, rtx y)
- if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
-     && GET_CODE (SUBREG_REG (y)) == MEM
+ if (MEM_P (x) && GET_CODE (y) == SUBREG
+     && MEM_P (SUBREG_REG (y))
- if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
-     && GET_CODE (SUBREG_REG (x)) == MEM
+ if (MEM_P (y) && GET_CODE (x) == SUBREG
+     && MEM_P (SUBREG_REG (x))
@@ -9348,7 +9348,7 @@ gen_lowpart_for_combine (enum machine_mode mode, rtx x)
- if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
+ if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
@@ -9369,7 +9369,7 @@ gen_lowpart_for_combine (enum machine_mode mode, rtx x)
- if (GET_CODE (x) == MEM)
+ if (MEM_P (x))
@@ -10899,7 +10899,7 @@ record_dead_and_set_regs_1 (rtx dest, rtx setter, void *data)
- else if (GET_CODE (dest) == MEM
+ else if (MEM_P (dest)
@@ -11088,7 +11088,7 @@ get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
- else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
+ else if (MEM_P (x) && ! RTX_UNCHANGING_P (x)
@@ -11384,7 +11384,7 @@ mark_used_regs_combine (rtx x)
- if (GET_CODE (XEXP (x, 0)) == MEM)
+ if (MEM_P (XEXP (x, 0)))
@@ -11425,7 +11425,7 @@ mark_used_regs_combine (rtx x)
- if (GET_CODE (testreg) == MEM)
+ if (MEM_P (testreg))
@@ -11624,7 +11624,7 @@ move_deaths (rtx x, rtx maybe_kill_insn, int from_cuid, rtx to_insn,
- if (GET_CODE (dest) == MEM)
+ if (MEM_P (dest))
@@ -12356,7 +12356,7 @@ unmentioned_reg_p_1 (rtx *loc, void *expr)
- && (REG_P (x) || GET_CODE (x) == MEM)
+ && (REG_P (x) || MEM_P (x))

gcc/cse.c | 44

@@ -1689,7 +1689,7 @@ static int
- if (*x && GET_CODE (*x) == MEM)
+ if (*x && MEM_P (*x))
@@ -2246,7 +2246,7 @@ canon_hash (rtx x, enum machine_mode mode)
- if (GET_CODE (XEXP (x, 0)) == MEM
+ if (MEM_P (XEXP (x, 0))
@@ -4195,7 +4195,7 @@ equiv_constant (rtx x)
- if (GET_CODE (x) == MEM)
+ if (MEM_P (x))
@@ -4231,7 +4231,7 @@ gen_lowpart_if_possible (enum machine_mode mode, rtx x)
- else if (GET_CODE (x) == MEM)
+ else if (MEM_P (x))
@@ -4700,7 +4700,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- if (GET_CODE (XEXP (y, 0)) == MEM)
+ if (MEM_P (XEXP (y, 0)))
@@ -4719,7 +4719,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- if (GET_CODE (XEXP (x, 0)) == MEM)
+ if (MEM_P (XEXP (x, 0)))
@@ -4789,7 +4789,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- if (GET_CODE (dest) == MEM)
+ if (MEM_P (dest))
@@ -4916,7 +4916,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- if (GET_CODE (src) == MEM
+ if (MEM_P (src)
@@ -5130,7 +5130,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- && GET_CODE (src) == MEM && ! do_not_record
+ && MEM_P (src) && ! do_not_record
@@ -5391,7 +5391,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- || GET_CODE (sets[i].orig_src) == MEM))
+ || MEM_P (sets[i].orig_src)))
@@ -5426,7 +5426,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- || (GET_CODE (src_folded) != MEM
+ || (!MEM_P (src_folded)
@@ -5542,7 +5542,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- if (GET_CODE (dest) == MEM)
+ if (MEM_P (dest))
@@ -5658,7 +5658,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- else if (GET_CODE (dest) == MEM)
+ else if (MEM_P (dest))
@@ -5831,7 +5831,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- else if (GET_CODE (dest) == MEM)
+ else if (MEM_P (dest))
@@ -5931,7 +5931,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- && GET_CODE (dest) == MEM
+ && MEM_P (dest)
@@ -5973,7 +5973,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- if (GET_CODE (inner_dest) == MEM
+ if (MEM_P (inner_dest)
@@ -5985,7 +5985,7 @@ cse_insn (rtx insn, rtx libcall_insn)
- elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
+ elt->in_memory = (MEM_P (sets[i].inner_dest)
@@ -6248,7 +6248,7 @@ invalidate_from_clobbers (rtx x)
- || GET_CODE (ref) == MEM)
+ || MEM_P (ref))
@@ -6265,7 +6265,7 @@ invalidate_from_clobbers (rtx x)
- || GET_CODE (ref) == MEM)
+ || MEM_P (ref))
@@ -6517,7 +6517,7 @@ cse_check_loop_start (rtx x, rtx set ATTRIBUTE_UNUSED, void *data)
- if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
+ if ((MEM_P (x) && MEM_P (*cse_check_loop_start_value))
@@ -6637,7 +6637,7 @@ cse_set_around_loop (rtx x, rtx insn, rtx loop_start)
- || GET_CODE (SET_DEST (x)) == MEM)
+ || MEM_P (SET_DEST (x)))
@@ -7303,7 +7303,7 @@ count_reg_usage (rtx x, int *counts, int incr)
- if (GET_CODE (XEXP (x, 0)) == MEM)
+ if (MEM_P (XEXP (x, 0)))

gcc/cselib.c | 22

@@ -434,7 +434,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
- if (REG_P (t) || GET_CODE (t) == MEM)
+ if (REG_P (t) || MEM_P (t))
@@ -452,7 +452,7 @@ rtx_equal_for_cselib_p (rtx x, rtx y)
- if (REG_P (t) || GET_CODE (t) == MEM)
+ if (REG_P (t) || MEM_P (t))
@@ -720,7 +720,7 @@ add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
- if (GET_CODE (l->loc) == MEM
+ if (MEM_P (l->loc)
@@ -923,7 +923,7 @@ cselib_lookup (rtx x, enum machine_mode mode, int create)
- if (GET_CODE (x) == MEM)
+ if (MEM_P (x))
@@ -1085,7 +1085,7 @@ cselib_invalidate_mem (rtx mem_rtx)
- if (GET_CODE (x) != MEM)
+ if (!MEM_P (x))
@@ -1148,7 +1148,7 @@ cselib_invalidate_rtx (rtx dest, rtx ignore ATTRIBUTE_UNUSED,
- else if (GET_CODE (dest) == MEM)
+ else if (MEM_P (dest))
@@ -1199,7 +1199,7 @@ cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
- else if (GET_CODE (dest) == MEM && dest_addr_elt != 0
+ else if (MEM_P (dest) && dest_addr_elt != 0
@@ -1275,13 +1275,13 @@ cselib_record_sets (rtx insn)
- || (GET_CODE (dest) == MEM && cselib_record_memory))
+ || (MEM_P (dest) && cselib_record_memory))
- if (GET_CODE (dest) == MEM)
+ if (MEM_P (dest))
@@ -1303,7 +1303,7 @@ cselib_record_sets (rtx insn)
- if (REG_P (dest) || GET_CODE (dest) == MEM)
+ if (REG_P (dest) || MEM_P (dest))
@@ -1321,7 +1321,7 @@ cselib_record_sets (rtx insn)
- || (GET_CODE (dest) == MEM && cselib_record_memory))
+ || (MEM_P (dest) && cselib_record_memory))

gcc/dbxout.c | 26

@@ -276,7 +276,7 @@ static const char *cwd;
- (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
+ (MEM_P (DECL_INCOMING_RTL (PARM)))
@@ -2169,7 +2169,7 @@ dbxout_symbol (tree decl, int local ATTRIBUTE_UNUSED)
- if (GET_CODE (DECL_RTL (decl)) != MEM
+ if (!MEM_P (DECL_RTL (decl))
@@ -2473,7 +2473,7 @@ dbxout_symbol_location (tree decl, tree type, const char *suffix, rtx home)
- if (GET_CODE (home) == MEM
+ if (MEM_P (home)
@@ -2546,8 +2546,8 @@ dbxout_symbol_location (tree decl, tree type, const char *suffix, rtx home)
- else if (GET_CODE (home) == MEM
-          && (GET_CODE (XEXP (home, 0)) == MEM
+ else if (MEM_P (home)
+          && (MEM_P (XEXP (home, 0))
@@ -2586,13 +2586,13 @@ dbxout_symbol_location (tree decl, tree type, const char *suffix, rtx home)
- else if (GET_CODE (home) == MEM
+ else if (MEM_P (home)
- else if (GET_CODE (home) == MEM
+ else if (MEM_P (home)
@@ -2601,7 +2601,7 @@ dbxout_symbol_location (tree decl, tree type, const char *suffix, rtx home)
- else if (GET_CODE (home) == MEM
+ else if (MEM_P (home)
@@ -2892,7 +2892,7 @@ dbxout_parms (tree parms)
- else if (GET_CODE (DECL_RTL (parms)) == MEM
+ else if (MEM_P (DECL_RTL (parms))
@@ -2946,8 +2946,8 @@ dbxout_parms (tree parms)
- else if (GET_CODE (DECL_RTL (parms)) == MEM
-          && GET_CODE (XEXP (DECL_RTL (parms), 0)) == MEM)
+ else if (MEM_P (DECL_RTL (parms))
+          && MEM_P (XEXP (DECL_RTL (parms), 0)))
@@ -2973,7 +2973,7 @@ dbxout_parms (tree parms)
- else if (GET_CODE (DECL_RTL (parms)) == MEM
+ else if (MEM_P (DECL_RTL (parms))
@@ -3064,7 +3064,7 @@ dbxout_reg_parms (tree parms)
- else if (GET_CODE (DECL_RTL (parms)) == MEM
+ else if (MEM_P (DECL_RTL (parms))

gcc/df.c | 2

@@ -985,7 +985,7 @@ df_uses_record (struct df *df, rtx *loc, enum df_ref_type ref_type,
- if (GET_CODE (XEXP (x, 0)) == MEM)
+ if (MEM_P (XEXP (x, 0)))

gcc/dojump.c

@@ -622,7 +622,7 @@ do_jump (tree exp, rtx if_false_label, rtx if_true_label)
- if (!cse_not_expected && GET_CODE (temp) == MEM)
+ if (!cse_not_expected && MEM_P (temp))

gcc/dwarf2out.c

@@ -1000,7 +1000,7 @@ stack_adjust_offset (rtx pattern)
- else if (GET_CODE (dest) == MEM)
+ else if (MEM_P (dest))
@@ -8735,7 +8735,7 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode, bool can_use_fbreg)
- if (GET_CODE (XEXP (rtl, 0)) == MEM)
+ if (MEM_P (XEXP (rtl, 0)))
@@ -8927,7 +8927,7 @@ loc_descriptor_from_tree (tree loc, int addressp)
- if (GET_CODE (rtl) != MEM)
+ if (!MEM_P (rtl))
@@ -8963,7 +8963,7 @@ loc_descriptor_from_tree (tree loc, int addressp)
- if (GET_CODE (rtl) == MEM)
+ if (MEM_P (rtl))
@@ -9045,7 +9045,7 @@ loc_descriptor_from_tree (tree loc, int addressp)
- if (GET_CODE (rtl) != MEM)
+ if (!MEM_P (rtl))
@@ -9823,7 +9823,7 @@ rtl_for_decl_location (tree decl)
- || (GET_CODE (rtl) == MEM
+ || (MEM_P (rtl)
@@ -9860,11 +9860,11 @@ rtl_for_decl_location (tree decl)
- else if (GET_CODE (rtl) == MEM
+ else if (MEM_P (rtl)
- && GET_CODE (DECL_INCOMING_RTL (decl)) != MEM
+ && !MEM_P (DECL_INCOMING_RTL (decl))
@@ -9888,7 +9888,7 @@ rtl_for_decl_location (tree decl)
- && GET_CODE (rtl) == MEM
+ && MEM_P (rtl)
@@ -9949,7 +9949,7 @@ rtl_for_decl_location (tree decl)
- if (rtl && GET_CODE (rtl) == MEM)
+ if (rtl && MEM_P (rtl))
@@ -10268,7 +10268,7 @@ add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr, tree b
- && (! optimize || GET_CODE (SAVE_EXPR_RTL (bound)) == MEM))
+ && (! optimize || MEM_P (SAVE_EXPR_RTL (bound))))
@@ -10276,7 +10276,7 @@ add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr, tree b
- if (GET_CODE (loc) == MEM)
+ if (MEM_P (loc))
@@ -10848,7 +10848,7 @@ decl_start_label (tree decl)
- if (GET_CODE (x) != MEM)
+ if (!MEM_P (x))

gcc/emit-rtl.c

@@ -1140,7 +1140,7 @@ gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
- if (GET_CODE (x) == MEM
+ if (MEM_P (x)
@@ -1223,7 +1223,7 @@ gen_highpart (enum machine_mode mode, rtx x)
- if (result != NULL_RTX && GET_CODE (result) == MEM)
+ if (result != NULL_RTX && MEM_P (result))
@@ -1349,7 +1349,7 @@ operand_subword (rtx op, unsigned int offset, int validate_address, enum machine
- if (GET_CODE (op) == MEM)
+ if (MEM_P (op))
@@ -1802,7 +1802,7 @@ change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
- if (GET_CODE (memref) != MEM)
+ if (!MEM_P (memref))
@@ -2785,7 +2785,7 @@ make_safe_from (rtx x, rtx other)
- if ((GET_CODE (other) == MEM
+ if ((MEM_P (other)

gcc/explow.c | 10

@@ -283,7 +283,7 @@ int_expr_size (tree exp)
- if (GET_CODE (x) == MEM
+ if (MEM_P (x)
@@ -414,7 +414,7 @@ copy_all_regs (rtx x)
- else if (GET_CODE (x) == MEM)
+ else if (MEM_P (x))
@@ -574,7 +574,7 @@ memory_address_noforce (enum machine_mode mode, rtx x)
- if (GET_CODE (ref) != MEM)
+ if (!MEM_P (ref))
@@ -620,7 +620,7 @@ maybe_set_unchanging (rtx ref, tree t)
- if (GET_CODE (x) != MEM
+ if (!MEM_P (x)
@@ -763,7 +763,7 @@ force_not_mem (rtx x)
- if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
+ if (!MEM_P (x) || GET_MODE (x) == BLKmode)

gcc/expmed.c | 64

@@ -300,7 +300,7 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
- = (GET_CODE (str_rtx) == MEM) ? BITS_PER_UNIT : BITS_PER_WORD;
+ = (MEM_P (str_rtx)) ? BITS_PER_UNIT : BITS_PER_WORD;
@@ -332,7 +332,7 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
- && GET_CODE (op0) != MEM
+ && !MEM_P (op0)
@@ -396,7 +396,7 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
- && (GET_CODE (op0) != MEM
+ && (!MEM_P (op0)
@@ -435,7 +435,7 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
- if (GET_CODE (op0) == MEM)
+ if (MEM_P (op0))
@@ -446,7 +446,7 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
- if (GET_CODE (op0) == MEM)
+ if (MEM_P (op0))
@@ -457,14 +457,14 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
- && GET_CODE (op0) != MEM
+ && !MEM_P (op0)
- if (GET_CODE (op0) != MEM
+ if (!MEM_P (op0)
@@ -554,7 +554,7 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
- if (GET_CODE (op0) != MEM)
+ if (!MEM_P (op0))
@@ -615,7 +615,7 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
- if (GET_CODE (op0) == MEM
+ if (MEM_P (op0)
@@ -658,7 +658,7 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
- if (GET_CODE (xop0) == MEM)
+ if (MEM_P (xop0))
@@ -678,7 +678,7 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
- if (BITS_BIG_ENDIAN && GET_CODE (xop0) != MEM)
+ if (BITS_BIG_ENDIAN && !MEM_P (xop0))
@ -971,7 +971,7 @@ store_split_bit_field (rtx op0, unsigned HOST_WIDE_INT bitsize,
|
|||
/* We must do an endian conversion exactly the same way as it is
|
||||
done in extract_bit_field, so that the two calls to
|
||||
extract_fixed_bit_field will have comparable arguments. */
|
||||
if (GET_CODE (value) != MEM || GET_MODE (value) == BLKmode)
|
||||
if (!MEM_P (value) || GET_MODE (value) == BLKmode)
|
||||
total_bits = BITS_PER_WORD;
|
||||
else
|
||||
total_bits = GET_MODE_BITSIZE (GET_MODE (value));
|
||||
|
@ -1057,7 +1057,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
|||
HOST_WIDE_INT total_size)
|
||||
{
|
||||
unsigned int unit
|
||||
= (GET_CODE (str_rtx) == MEM) ? BITS_PER_UNIT : BITS_PER_WORD;
|
||||
= (MEM_P (str_rtx)) ? BITS_PER_UNIT : BITS_PER_WORD;
|
||||
unsigned HOST_WIDE_INT offset = bitnum / unit;
|
||||
unsigned HOST_WIDE_INT bitpos = bitnum % unit;
|
||||
rtx op0 = str_rtx;
|
||||
|
@ -1101,7 +1101,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
|||
/* Use vec_extract patterns for extracting parts of vectors whenever
|
||||
available. */
|
||||
if (VECTOR_MODE_P (GET_MODE (op0))
|
||||
&& GET_CODE (op0) != MEM
|
||||
&& !MEM_P (op0)
|
||||
&& (vec_extract_optab->handlers[GET_MODE (op0)].insn_code
|
||||
!= CODE_FOR_nothing)
|
||||
&& ((bitsize + bitnum) / GET_MODE_BITSIZE (GET_MODE_INNER (GET_MODE (op0)))
|
||||
|
@ -1159,7 +1159,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
|||
enum machine_mode imode = int_mode_for_mode (GET_MODE (op0));
|
||||
if (imode != GET_MODE (op0))
|
||||
{
|
||||
if (GET_CODE (op0) == MEM)
|
||||
if (MEM_P (op0))
|
||||
op0 = adjust_address (op0, imode, 0);
|
||||
else if (imode != BLKmode)
|
||||
op0 = gen_lowpart (imode, op0);
|
||||
|
@ -1170,7 +1170,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
|||
|
||||
/* We may be accessing data outside the field, which means
|
||||
we can alias adjacent data. */
|
||||
if (GET_CODE (op0) == MEM)
|
||||
if (MEM_P (op0))
|
||||
{
|
||||
op0 = shallow_copy_rtx (op0);
|
||||
set_mem_alias_set (op0, 0);
|
||||
|
@ -1189,7 +1189,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
|||
But as we have it, it counts within whatever size OP0 now has.
|
||||
On a bigendian machine, these are not the same, so convert. */
|
||||
if (BYTES_BIG_ENDIAN
|
||||
&& GET_CODE (op0) != MEM
|
||||
&& !MEM_P (op0)
|
||||
&& unit > GET_MODE_BITSIZE (GET_MODE (op0)))
|
||||
bitpos += unit - GET_MODE_BITSIZE (GET_MODE (op0));
|
||||
|
||||
|
@ -1216,12 +1216,12 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
|||
&& (BYTES_BIG_ENDIAN
|
||||
? bitpos + bitsize == BITS_PER_WORD
|
||||
: bitpos == 0)))
|
||||
&& ((GET_CODE (op0) != MEM
|
||||
&& ((!MEM_P (op0)
|
||||
&& TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
|
||||
GET_MODE_BITSIZE (GET_MODE (op0)))
|
||||
&& GET_MODE_SIZE (mode1) != 0
|
||||
&& byte_offset % GET_MODE_SIZE (mode1) == 0)
|
||||
|| (GET_CODE (op0) == MEM
|
||||
|| (MEM_P (op0)
|
||||
&& (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (op0))
|
||||
|| (offset * BITS_PER_UNIT % bitsize == 0
|
||||
&& MEM_ALIGN (op0) % bitsize == 0)))))
|
||||
|
@ -1342,7 +1342,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
|||
/* OFFSET is the number of words or bytes (UNIT says which)
|
||||
from STR_RTX to the first word or byte containing part of the field. */
|
||||
|
||||
if (GET_CODE (op0) != MEM)
|
||||
if (!MEM_P (op0))
|
||||
{
|
||||
if (offset != 0
|
||||
|| GET_MODE_SIZE (GET_MODE (op0)) > UNITS_PER_WORD)
|
||||
|
@ -1376,7 +1376,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
|||
rtx pat;
|
||||
enum machine_mode maxmode = mode_for_extraction (EP_extzv, 0);
|
||||
|
||||
if (GET_CODE (xop0) == MEM)
|
||||
if (MEM_P (xop0))
|
||||
{
|
||||
int save_volatile_ok = volatile_ok;
|
||||
volatile_ok = 1;
|
||||
|
@ -1440,13 +1440,13 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
|||
xbitpos = unit - bitsize - xbitpos;
|
||||
|
||||
/* Now convert from counting within UNIT to counting in MAXMODE. */
|
||||
if (BITS_BIG_ENDIAN && GET_CODE (xop0) != MEM)
|
||||
if (BITS_BIG_ENDIAN && !MEM_P (xop0))
|
||||
xbitpos += GET_MODE_BITSIZE (maxmode) - unit;
|
||||
|
||||
unit = GET_MODE_BITSIZE (maxmode);
|
||||
|
||||
if (xtarget == 0
|
||||
|| (flag_force_mem && GET_CODE (xtarget) == MEM))
|
||||
|| (flag_force_mem && MEM_P (xtarget)))
|
||||
xtarget = xspec_target = gen_reg_rtx (tmode);
|
||||
|
||||
if (GET_MODE (xtarget) != maxmode)
|
||||
|
@ -1509,7 +1509,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
|||
rtx pat;
|
||||
enum machine_mode maxmode = mode_for_extraction (EP_extv, 0);
|
||||
|
||||
if (GET_CODE (xop0) == MEM)
|
||||
if (MEM_P (xop0))
|
||||
{
|
||||
/* Is the memory operand acceptable? */
|
||||
if (! ((*insn_data[(int) CODE_FOR_extv].operand[1].predicate)
|
||||
|
@ -1569,13 +1569,13 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
|
|||
|
||||
/* XBITPOS counts within a size of UNIT.
|
||||
Adjust to count within a size of MAXMODE. */
|
||||
if (BITS_BIG_ENDIAN && GET_CODE (xop0) != MEM)
|
||||
if (BITS_BIG_ENDIAN && !MEM_P (xop0))
|
||||
xbitpos += (GET_MODE_BITSIZE (maxmode) - unit);
|
||||
|
||||
unit = GET_MODE_BITSIZE (maxmode);
|
||||
|
||||
if (xtarget == 0
|
||||
|| (flag_force_mem && GET_CODE (xtarget) == MEM))
|
||||
|| (flag_force_mem && MEM_P (xtarget)))
|
||||
xtarget = xspec_target = gen_reg_rtx (tmode);
|
||||
|
||||
if (GET_MODE (xtarget) != maxmode)
|
||||
|
@ -2514,7 +2514,7 @@ expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val,
|
|||
|
||||
/* Avoid referencing memory over and over.
|
||||
For speed, but also for correctness when mem is volatile. */
|
||||
if (GET_CODE (op0) == MEM)
|
||||
if (MEM_P (op0))
|
||||
op0 = force_reg (mode, op0);
|
||||
|
||||
/* ACCUM starts out either as OP0 or as a zero, depending on
|
||||
|
@ -3290,9 +3290,9 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
|
|||
/* Don't clobber an operand while doing a multi-step calculation. */
|
||||
|| ((rem_flag || op1_is_constant)
|
||||
&& (reg_mentioned_p (target, op0)
|
||||
|| (GET_CODE (op0) == MEM && GET_CODE (target) == MEM)))
|
||||
|| (MEM_P (op0) && MEM_P (target))))
|
||||
|| reg_mentioned_p (target, op1)
|
||||
|| (GET_CODE (op1) == MEM && GET_CODE (target) == MEM)))
|
||||
|| (MEM_P (op1) && MEM_P (target))))
|
||||
target = 0;
|
||||
|
||||
/* Get the mode in which to perform this computation. Normally it will
|
||||
|
@ -3381,9 +3381,9 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
|
|||
|
||||
/* If one of the operands is a volatile MEM, copy it into a register. */
|
||||
|
||||
if (GET_CODE (op0) == MEM && MEM_VOLATILE_P (op0))
|
||||
if (MEM_P (op0) && MEM_VOLATILE_P (op0))
|
||||
op0 = force_reg (compute_mode, op0);
|
||||
if (GET_CODE (op1) == MEM && MEM_VOLATILE_P (op1))
|
||||
if (MEM_P (op1) && MEM_VOLATILE_P (op1))
|
||||
op1 = force_reg (compute_mode, op1);
|
||||
|
||||
/* If we need the remainder or if OP1 is constant, we need to
|
||||
|
@ -4610,7 +4610,7 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
|
|||
if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD * 2
|
||||
&& GET_MODE_CLASS (mode) == MODE_INT
|
||||
&& op1 == const0_rtx
|
||||
&& (GET_CODE (op0) != MEM || ! MEM_VOLATILE_P (op0)))
|
||||
&& (!MEM_P (op0) || ! MEM_VOLATILE_P (op0)))
|
||||
{
|
||||
if (code == EQ || code == NE)
|
||||
{
|
||||
gcc/expr.c

@@ -792,7 +792,7 @@ convert_move (rtx to, rtx from, int unsignedp)
if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
&& GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
{
-if (!((GET_CODE (from) == MEM
+if (!((MEM_P (from)
&& ! MEM_VOLATILE_P (from)
&& direct_load[(int) to_mode]
&& ! mode_dependent_address_p (XEXP (from, 0)))

@@ -811,7 +811,7 @@ convert_move (rtx to, rtx from, int unsignedp)
&& TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
GET_MODE_BITSIZE (from_mode)))
{
-if (!((GET_CODE (from) == MEM
+if (!((MEM_P (from)
&& ! MEM_VOLATILE_P (from)
&& direct_load[(int) to_mode]
&& ! mode_dependent_address_p (XEXP (from, 0)))

@@ -984,7 +984,7 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns
&& GET_MODE_CLASS (oldmode) == MODE_INT
&& (GET_CODE (x) == CONST_DOUBLE
|| (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
-&& ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
+&& ((MEM_P (x) && ! MEM_VOLATILE_P (x)
&& direct_load[(int) mode])
|| (REG_P (x)
&& (! HARD_REGISTER_P (x)

@@ -1358,9 +1358,9 @@ emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
y = protect_from_queue (y, 0);
size = protect_from_queue (size, 0);
-if (GET_CODE (x) != MEM)
+if (!MEM_P (x))
abort ();
-if (GET_CODE (y) != MEM)
+if (!MEM_P (y))
abort ();
if (size == 0)
abort ();

@@ -1883,7 +1883,7 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
from strange tricks we might play; but make sure that the source can
be loaded directly into the destination. */
src = orig_src;
-if (GET_CODE (orig_src) != MEM
+if (!MEM_P (orig_src)
&& (!CONSTANT_P (orig_src)
|| (GET_MODE (orig_src) != mode
&& GET_MODE (orig_src) != VOIDmode)))

@@ -1897,7 +1897,7 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
}
/* Optimize the access just a bit. */
-if (GET_CODE (src) == MEM
+if (MEM_P (src)
&& (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
|| MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
&& bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0

@@ -2043,7 +2043,7 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
emit_group_load (dst, temp, type, ssize);
return;
}
-else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
+else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
{
dst = gen_reg_rtx (GET_MODE (orig_dst));
/* Make life a bit easier for combine. */

@@ -2102,7 +2102,7 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
}
/* Optimize the access just a bit. */
-if (GET_CODE (dest) == MEM
+if (MEM_P (dest)
&& (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
|| MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
&& bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0

@@ -2560,7 +2560,7 @@ rtx
clear_storage (rtx object, rtx size)
{
rtx retval = 0;
-unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
+unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
: GET_MODE_ALIGNMENT (GET_MODE (object)));
/* If OBJECT is not BLKmode and SIZE is the same size as its mode,

@@ -2824,14 +2824,14 @@ emit_move_insn (rtx x, rtx y)
/* If X or Y are memory references, verify that their addresses are valid
for the machine. */
-if (GET_CODE (x) == MEM
+if (MEM_P (x)
&& ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
&& ! push_operand (x, GET_MODE (x)))
|| (flag_force_addr
&& CONSTANT_ADDRESS_P (XEXP (x, 0)))))
x = validize_mem (x);
-if (GET_CODE (y) == MEM
+if (MEM_P (y)
&& (! memory_address_p (GET_MODE (y), XEXP (y, 0))
|| (flag_force_addr
&& CONSTANT_ADDRESS_P (XEXP (y, 0)))))

@@ -3056,14 +3056,14 @@ emit_move_insn_1 (rtx x, rtx y)
if (reload_in_progress)
{
x = gen_lowpart_common (tmode, x1);
-if (x == 0 && GET_CODE (x1) == MEM)
+if (x == 0 && MEM_P (x1))
{
x = adjust_address_nv (x1, tmode, 0);
copy_replacements (x1, x);
}
y = gen_lowpart_common (tmode, y1);
-if (y == 0 && GET_CODE (y1) == MEM)
+if (y == 0 && MEM_P (y1))
{
y = adjust_address_nv (y1, tmode, 0);
copy_replacements (y1, y);

@@ -3145,10 +3145,10 @@ emit_move_insn_1 (rtx x, rtx y)
/* If we are in reload, see if either operand is a MEM whose address
is scheduled for replacement. */
-if (reload_in_progress && GET_CODE (x) == MEM
+if (reload_in_progress && MEM_P (x)
&& (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
x = replace_equiv_address_nv (x, inner);
-if (reload_in_progress && GET_CODE (y) == MEM
+if (reload_in_progress && MEM_P (y)
&& (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
y = replace_equiv_address_nv (y, inner);

@@ -3791,7 +3791,7 @@ expand_assignment (tree to, tree from, int want_value)
{
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
-if (GET_CODE (to_rtx) != MEM)
+if (!MEM_P (to_rtx))
abort ();
#ifdef POINTERS_EXTEND_UNSIGNED

@@ -3804,7 +3804,7 @@ expand_assignment (tree to, tree from, int want_value)
/* A constant address in TO_RTX can have VOIDmode, we must not try
to call force_reg for that case. Avoid that case. */
-if (GET_CODE (to_rtx) == MEM
+if (MEM_P (to_rtx)
&& GET_MODE (to_rtx) == BLKmode
&& GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
&& bitsize > 0

@@ -3821,7 +3821,7 @@ expand_assignment (tree to, tree from, int want_value)
offset));
}
-if (GET_CODE (to_rtx) == MEM)
+if (MEM_P (to_rtx))
{
/* If the field is at offset zero, we could have been given the
DECL_RTX of the parent struct. Don't munge it. */

@@ -3832,7 +3832,7 @@ expand_assignment (tree to, tree from, int want_value)
/* Deal with volatile and readonly fields. The former is only done
for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
-if (volatilep && GET_CODE (to_rtx) == MEM)
+if (volatilep && MEM_P (to_rtx))
{
if (to_rtx == orig_to_rtx)
to_rtx = copy_rtx (to_rtx);

@@ -3844,14 +3844,14 @@ expand_assignment (tree to, tree from, int want_value)
/* We can't assert that a MEM won't be set more than once
if the component is not addressable because another
non-addressable component may be referenced by the same MEM. */
-&& ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
+&& ! (MEM_P (to_rtx) && ! can_address_p (to)))
{
if (to_rtx == orig_to_rtx)
to_rtx = copy_rtx (to_rtx);
RTX_UNCHANGING_P (to_rtx) = 1;
}
-if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
+if (MEM_P (to_rtx) && ! can_address_p (to))
{
if (to_rtx == orig_to_rtx)
to_rtx = copy_rtx (to_rtx);

@@ -4152,7 +4152,7 @@ store_expr (tree exp, rtx target, int want_value)
dont_return_target = 1;
}
else if ((want_value & 1) != 0
-&& GET_CODE (target) == MEM
+&& MEM_P (target)
&& ! MEM_VOLATILE_P (target)
&& GET_MODE (target) != BLKmode)
/* If target is in memory and caller wants value in a register instead,

@@ -4216,7 +4216,7 @@ store_expr (tree exp, rtx target, int want_value)
only necessary if the MEM is volatile, or if the address
overlaps TARGET. But not performing the load twice also
reduces the amount of rtl we generate and then have to CSE. */
-if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
+if (MEM_P (temp) && (want_value & 1) != 0)
temp = copy_to_reg (temp);
/* If TEMP is a VOIDmode constant, use convert_modes to make

@@ -4269,7 +4269,7 @@ store_expr (tree exp, rtx target, int want_value)
or if we really want the correct value. */
if (!(target && REG_P (target)
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
-&& !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
+&& !(MEM_P (target) && MEM_VOLATILE_P (target))
&& ! rtx_equal_p (temp, target)
&& (CONSTANT_P (temp) || (want_value & 1) != 0))
dont_return_target = 1;

@@ -4427,7 +4427,7 @@ store_expr (tree exp, rtx target, int want_value)
/* If we are supposed to return TEMP, do so as long as it isn't a MEM.
??? The latter test doesn't seem to make sense. */
-else if (dont_return_target && GET_CODE (temp) != MEM)
+else if (dont_return_target && !MEM_P (temp))
return temp;
/* Return TARGET itself if it is a hard register. */

@@ -4651,9 +4651,9 @@ store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
/* If we have a nonzero bitpos for a register target, then we just
let store_field do the bitfield handling. This is unlikely to
generate unnecessary clear instructions anyways. */
-&& (bitpos == 0 || GET_CODE (target) == MEM))
+&& (bitpos == 0 || MEM_P (target)))
{
-if (GET_CODE (target) == MEM)
+if (MEM_P (target))
target
= adjust_address (target,
GET_MODE (target) == BLKmode

@@ -4663,7 +4663,7 @@ store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
/* Update the alias set, if required. */
-if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
+if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
&& MEM_ALIAS_SET (target) != 0)
{
target = copy_rtx (target);

@@ -4800,7 +4800,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
target));
offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
-if (GET_CODE (to_rtx) != MEM)
+if (!MEM_P (to_rtx))
abort ();
#ifdef POINTERS_EXTEND_UNSIGNED

@@ -4817,7 +4817,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
if (TREE_READONLY (field))
{
-if (GET_CODE (to_rtx) == MEM)
+if (MEM_P (to_rtx))
to_rtx = copy_rtx (to_rtx);
RTX_UNCHANGING_P (to_rtx) = 1;

@@ -4854,7 +4854,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
}
#endif
-if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
+if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
&& DECL_NONADDRESSABLE_P (field))
{
to_rtx = copy_rtx (to_rtx);

@@ -5032,7 +5032,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
&& (lo = tree_low_cst (lo_index, 0),
hi = tree_low_cst (hi_index, 0),
count = hi - lo + 1,
-(GET_CODE (target) != MEM
+(!MEM_P (target)
|| count <= 2
|| (host_integerp (TYPE_SIZE (elttype), 1)
&& (tree_low_cst (TYPE_SIZE (elttype), 1) * count

@@ -5043,7 +5043,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
{
bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
-if (GET_CODE (target) == MEM
+if (MEM_P (target)
&& !MEM_KEEP_ALIAS_SET_P (target)
&& TREE_CODE (type) == ARRAY_TYPE
&& TYPE_NONALIASED_COMPONENT (type))

@@ -5165,7 +5165,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
else
bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
-if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
+if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
&& TREE_CODE (type) == ARRAY_TYPE
&& TYPE_NONALIASED_COMPONENT (type))
{

@@ -5254,7 +5254,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
/* The assumption here is that it is safe to use
XEXP if the set is multi-word, but not if
it's single-word. */
-if (GET_CODE (target) == MEM)
+if (MEM_P (target))
to_rtx = adjust_address (target, mode, offset);
else if (offset == 0)
to_rtx = target;

@@ -5325,7 +5325,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
emit_move_insn (targetx, target);
}
-else if (GET_CODE (target) == MEM)
+else if (MEM_P (target))
targetx = target;
else
abort ();

@@ -5486,7 +5486,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
boundary. If so, we simply do a block copy. */
if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
{
-if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
+if (!MEM_P (target) || !MEM_P (temp)
|| bitpos % BITS_PER_UNIT != 0)
abort ();

@@ -5508,7 +5508,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
/* The caller wants an rtx for the value.
If possible, avoid refetching from the bitfield itself. */
if (width_mask != 0
-&& ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
+&& ! (MEM_P (target) && MEM_VOLATILE_P (target)))
{
tree count;
enum machine_mode tmode;

@@ -5829,7 +5829,7 @@ force_operand (rtx value, rtx target)
/* Check for subreg applied to an expression produced by loop optimizer. */
if (code == SUBREG
&& !REG_P (SUBREG_REG (value))
-&& GET_CODE (SUBREG_REG (value)) != MEM)
+&& !MEM_P (SUBREG_REG (value)))
{
value = simplify_gen_subreg (GET_MODE (value),
force_reg (GET_MODE (SUBREG_REG (value)),

@@ -5940,7 +5940,7 @@ force_operand (rtx value, rtx target)
#ifdef INSN_SCHEDULING
/* On machines that have insn scheduling, we want all memory reference to be
explicit, so we need to deal with such paradoxical SUBREGs. */
-if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
+if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
&& (GET_MODE_SIZE (GET_MODE (value))
> GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
value

@@ -5985,7 +5985,7 @@ safe_from_p (rtx x, tree exp, int top_p)
!= INTEGER_CST)
&& GET_MODE (x) == BLKmode)
/* If X is in the outgoing argument area, it is always safe. */
-|| (GET_CODE (x) == MEM
+|| (MEM_P (x)
&& (XEXP (x, 0) == virtual_outgoing_args_rtx
|| (GET_CODE (XEXP (x, 0)) == PLUS
&& XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))

@@ -6094,7 +6094,7 @@ safe_from_p (rtx x, tree exp, int top_p)
if (DECL_P (exp))
{
if (!DECL_RTL_SET_P (exp)
-|| GET_CODE (DECL_RTL (exp)) != MEM)
+|| !MEM_P (DECL_RTL (exp)))
return 0;
else
exp_rtl = XEXP (DECL_RTL (exp), 0);

@@ -6102,7 +6102,7 @@ safe_from_p (rtx x, tree exp, int top_p)
break;
case INDIRECT_REF:
-if (GET_CODE (x) == MEM
+if (MEM_P (x)
&& alias_sets_conflict_p (MEM_ALIAS_SET (x),
get_alias_set (exp)))
return 0;

@@ -6112,7 +6112,7 @@ safe_from_p (rtx x, tree exp, int top_p)
/* Assume that the call will clobber all hard registers and
all of memory. */
if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
-|| GET_CODE (x) == MEM)
+|| MEM_P (x))
return 0;
break;

@@ -6196,7 +6196,7 @@ safe_from_p (rtx x, tree exp, int top_p)
/* If the rtl is X, then it is not safe. Otherwise, it is unless both
are memory and they conflict. */
return ! (rtx_equal_p (x, exp_rtl)
-|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
+|| (MEM_P (x) && MEM_P (exp_rtl)
&& true_dependence (exp_rtl, VOIDmode, x,
rtx_addr_varies_p)));
}

@@ -6571,7 +6571,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
&& modifier != EXPAND_CONST_ADDRESS)
{
temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
-if (GET_CODE (temp) == MEM)
+if (MEM_P (temp))
temp = copy_to_reg (temp);
return const0_rtx;
}

@@ -6676,7 +6676,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
if (context != 0 && context != current_function_decl
/* If var is static, we don't need a static chain to access it. */
-&& ! (GET_CODE (DECL_RTL (exp)) == MEM
+&& ! (MEM_P (DECL_RTL (exp))
&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
{
rtx addr;

@@ -6686,10 +6686,10 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
if (DECL_NO_STATIC_CHAIN (current_function_decl))
abort ();
lang_hooks.mark_addressable (exp);
-if (GET_CODE (DECL_RTL (exp)) != MEM)
+if (!MEM_P (DECL_RTL (exp)))
abort ();
addr = XEXP (DECL_RTL (exp), 0);
-if (GET_CODE (addr) == MEM)
+if (MEM_P (addr))
addr
= replace_equiv_address (addr,
fix_lexical_addr (XEXP (addr, 0), exp));

@@ -6703,7 +6703,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
from its initializer, while the initializer is still being parsed.
See expand_decl. */
-else if (GET_CODE (DECL_RTL (exp)) == MEM
+else if (MEM_P (DECL_RTL (exp))
&& REG_P (XEXP (DECL_RTL (exp), 0)))
temp = validize_mem (DECL_RTL (exp));

@@ -6711,7 +6711,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
the address is not valid or it is not a register and -fforce-addr
is specified, get the address into a register. */
-else if (GET_CODE (DECL_RTL (exp)) == MEM
+else if (MEM_P (DECL_RTL (exp))
&& modifier != EXPAND_CONST_ADDRESS
&& modifier != EXPAND_SUM
&& modifier != EXPAND_INITIALIZER

@@ -6730,7 +6730,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
if the address is a register. */
if (temp != 0)
{
-if (GET_CODE (temp) == MEM && REG_P (XEXP (temp, 0)))
+if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
return temp;

@@ -6856,7 +6856,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
put_var_into_stack (exp, /*rescan=*/true);
temp = SAVE_EXPR_RTL (exp);
}
-if (temp == 0 || GET_CODE (temp) != MEM)
+if (temp == 0 || !MEM_P (temp))
abort ();
return
replace_equiv_address (temp,

@@ -7325,7 +7325,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
C, but can in Ada if we have unchecked conversion of an expression
from a scalar type to an array or record type or for an
ARRAY_RANGE_REF whose type is BLKmode. */
-else if (GET_CODE (op0) != MEM
+else if (!MEM_P (op0)
&& (offset != 0
|| (code == ARRAY_RANGE_REF && mode == BLKmode)))
{

@@ -7355,7 +7355,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
EXPAND_SUM);
-if (GET_CODE (op0) != MEM)
+if (!MEM_P (op0))
abort ();
#ifdef POINTERS_EXTEND_UNSIGNED

@@ -7385,12 +7385,12 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
record its alignment as BIGGEST_ALIGNMENT. */
-if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
+if (MEM_P (op0) && bitpos == 0 && offset != 0
&& is_aligning_offset (offset, tem))
set_mem_align (op0, BIGGEST_ALIGNMENT);
/* Don't forget about volatility even if this is a bitfield. */
-if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
+if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
{
if (op0 == orig_op0)
op0 = copy_rtx (op0);

@@ -7426,7 +7426,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|| (mode1 != BLKmode
&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
|| (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
-|| (GET_CODE (op0) == MEM
+|| (MEM_P (op0)
&& (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
|| (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
&& ((modifier == EXPAND_CONST_ADDRESS

@@ -7446,8 +7446,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
enum machine_mode ext_mode = mode;
if (ext_mode == BLKmode
-&& ! (target != 0 && GET_CODE (op0) == MEM
-&& GET_CODE (target) == MEM
+&& ! (target != 0 && MEM_P (op0)
+&& MEM_P (target)
&& bitpos % BITS_PER_UNIT == 0))
ext_mode = mode_for_size (bitsize, MODE_INT, 1);

@@ -7461,8 +7461,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
/* In this case, BITPOS must start at a byte boundary and
TARGET, if specified, must be a MEM. */
-if (GET_CODE (op0) != MEM
-|| (target != 0 && GET_CODE (target) != MEM)
+if (!MEM_P (op0)
+|| (target != 0 && !MEM_P (target))
|| bitpos % BITS_PER_UNIT != 0)
abort ();

@@ -7479,7 +7479,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
op0 = validize_mem (op0);
-if (GET_CODE (op0) == MEM && REG_P (XEXP (op0, 0)))
+if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,

@@ -7742,7 +7742,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
target = assign_temp (type, 0, 1, 1);
}
-if (GET_CODE (target) == MEM)
+if (MEM_P (target))
/* Store data into beginning of memory target. */
store_expr (TREE_OPERAND (exp, 0),
adjust_address (target, TYPE_MODE (valtype), 0),

@@ -7825,7 +7825,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
&& GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
&& GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
op0 = gen_lowpart (TYPE_MODE (type), op0);
-else if (GET_CODE (op0) != MEM)
+else if (!MEM_P (op0))
{
/* If the operand is not a MEM, force it into memory. Since we
are going to be be changing the mode of the MEM, don't call

@@ -7850,7 +7850,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
that the operand is known to be aligned, indicate that it is.
Otherwise, we need only be concerned about alignment for non-BLKmode
results. */
-if (GET_CODE (op0) == MEM)
+if (MEM_P (op0))
{
op0 = copy_rtx (op0);

@@ -8263,7 +8263,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
target = original_target;
if (target == 0
|| modifier == EXPAND_STACK_PARM
-|| (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
+|| (MEM_P (target) && MEM_VOLATILE_P (target))
|| GET_MODE (target) != mode
|| (REG_P (target)
&& REGNO (target) < FIRST_PSEUDO_REGISTER))

@@ -8286,7 +8286,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
/* At this point, a MEM target is no longer useful; we will get better
code without it. */
-if (GET_CODE (target) == MEM)
+if (MEM_P (target))
target = gen_reg_rtx (mode);
/* If op1 was placed in target, swap op0 and op1. */

@@ -8654,7 +8654,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
|| REG_P (original_target)
|| TREE_ADDRESSABLE (type))
#endif
-&& (GET_CODE (original_target) != MEM
+&& (!MEM_P (original_target)
|| TREE_ADDRESSABLE (type)))
temp = original_target;
else if (TREE_ADDRESSABLE (type))

@@ -9100,7 +9100,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
}
}
-if (GET_CODE (op0) != MEM)
+if (!MEM_P (op0))
abort ();
mark_temp_addr_taken (op0);

@@ -9725,7 +9725,7 @@ expand_increment (tree exp, int post, int ignore)
return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
}
-if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
+if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
{
rtx addr = (general_operand (XEXP (op0, 0), mode)
? force_reg (Pmode, XEXP (op0, 0))
gcc/final.c

@@ -2602,7 +2602,7 @@ cleanup_subreg_operands (rtx insn)
recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
else if (GET_CODE (recog_data.operand[i]) == PLUS
|| GET_CODE (recog_data.operand[i]) == MULT
-|| GET_CODE (recog_data.operand[i]) == MEM)
+|| MEM_P (recog_data.operand[i]))
recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
}

@@ -2612,7 +2612,7 @@ cleanup_subreg_operands (rtx insn)
*recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
|| GET_CODE (*recog_data.dup_loc[i]) == MULT
-|| GET_CODE (*recog_data.dup_loc[i]) == MEM)
+|| MEM_P (*recog_data.dup_loc[i]))
*recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
}
}

@@ -2628,7 +2628,7 @@ alter_subreg (rtx *xp)
/* simplify_subreg does not remove subreg from volatile references.
We are required to. */
-if (GET_CODE (y) == MEM)
+if (MEM_P (y))
*xp = adjust_address (y, GET_MODE (x), SUBREG_BYTE (x));
else
{

@@ -2906,7 +2906,7 @@ get_mem_expr_from_op (rtx op, int *paddressp)
if (REG_P (op))
return REG_EXPR (op);
-else if (GET_CODE (op) != MEM)
+else if (!MEM_P (op))
return 0;
if (MEM_EXPR (op) != 0)
gcc/flow.c

@@ -858,7 +858,7 @@ notice_stack_pointer_modification_1 (rtx x, rtx pat ATTRIBUTE_UNUSED,
/* The stack pointer is only modified indirectly as the result
of a push until later in flow. See the comments in rtl.texi
regarding Embedded Side-Effects on Addresses. */
-|| (GET_CODE (x) == MEM
+|| (MEM_P (x)
&& GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == RTX_AUTOINC
&& XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
current_function_sp_is_unchanging = 0;

@@ -1934,7 +1934,7 @@ init_propagate_block_info (basic_block bb, regset live, regset local_set,
for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
if (GET_CODE (insn) == INSN
&& (set = single_set (insn))
-&& GET_CODE (SET_DEST (set)) == MEM)
+&& MEM_P (SET_DEST (set)))
{
rtx mem = SET_DEST (set);
rtx canon_mem = canon_rtx (mem);

@@ -2104,7 +2104,7 @@ insn_dead_p (struct propagate_block_info *pbi, rtx x, int call_ok,
else if (volatile_refs_p (SET_SRC (x)))
return 0;
-if (GET_CODE (r) == MEM)
+if (MEM_P (r))
{
rtx temp, canon_r;

@@ -2540,7 +2540,7 @@ mark_set_1 (struct propagate_block_info *pbi, enum rtx_code code, rtx reg, rtx c
|| GET_CODE (reg) == ZERO_EXTRACT
|| GET_CODE (reg) == SIGN_EXTRACT
|| GET_CODE (reg) == STRICT_LOW_PART);
-if (GET_CODE (reg) == MEM)
+if (MEM_P (reg))
break;
not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
/* Fall through. */

@@ -2611,10 +2611,10 @@ mark_set_1 (struct propagate_block_info *pbi, enum rtx_code code, rtx reg, rtx c
/* If the memory reference had embedded side effects (autoincrement
address modes. Then we may need to kill some entries on the
memory set list. */
-if (insn && GET_CODE (reg) == MEM)
+if (insn && MEM_P (reg))
for_each_rtx (&PATTERN (insn), invalidate_mems_from_autoinc, pbi);
-if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
+if (MEM_P (reg) && ! side_effects_p (reg)
/* ??? With more effort we could track conditional memory life. */
&& ! cond)
add_to_mem_set_list (pbi, canon_rtx (reg));

@@ -3730,7 +3730,7 @@ mark_used_regs (struct propagate_block_info *pbi, rtx x, rtx cond, rtx insn)
case CLOBBER:
/* If we are clobbering a MEM, mark any registers inside the address
as being used. */
-if (GET_CODE (XEXP (x, 0)) == MEM)
+if (MEM_P (XEXP (x, 0)))
mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
return;

@@ -3811,7 +3811,7 @@ mark_used_regs (struct propagate_block_info *pbi, rtx x, rtx cond, rtx insn)
/* If storing into MEM, don't show it as being used. But do
show the address as being used. */
-if (GET_CODE (testreg) == MEM)
+if (MEM_P (testreg))
{
#ifdef AUTO_INC_DEC
if (flags & PROP_AUTOINC)
gcc/function.c

@@ -1134,7 +1134,7 @@ mark_temp_addr_taken (rtx x)
/* If X is not in memory or is at a constant address, it cannot be in
a temporary slot. */
-if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
+if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
return;
p = find_temp_slot_from_address (XEXP (x, 0));

@@ -1181,7 +1181,7 @@ preserve_temp_slots (rtx x)
/* If X is not in memory or is at a constant address, it cannot be in
a temporary slot, but it can contain something whose address was
taken. */
-if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
+if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
{
for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
{

@@ -1242,7 +1242,7 @@ preserve_rtl_expr_result (rtx x)
/* If X is not in memory or is at a constant address, it cannot be in
a temporary slot. */
-if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
+if (x == 0 || !MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
return;
/* If we can find a match, move it to our level unless it is already at

@@ -1400,7 +1400,7 @@ put_var_into_stack (tree decl, int rescan)
reference, with a pseudo to address it, put that pseudo into the stack
if the var is non-local. */
if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
-&& GET_CODE (reg) == MEM
+&& MEM_P (reg)
&& REG_P (XEXP (reg, 0))
&& REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
{

@@ -1425,7 +1425,7 @@ put_var_into_stack (tree decl, int rescan)
/* If we can't use ADDRESSOF, make sure we see through one we already
generated. */
if (! can_use_addressof_p
-&& GET_CODE (reg) == MEM
+&& MEM_P (reg)
&& GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
reg = XEXP (XEXP (reg, 0), 0);

@@ -1442,7 +1442,7 @@ put_var_into_stack (tree decl, int rescan)
/* If this was previously a MEM but we've removed the ADDRESSOF,
set this address into that MEM so we always use the same
rtx for this variable. */
-if (orig_reg != reg && GET_CODE (orig_reg) == MEM)
+if (orig_reg != reg && MEM_P (orig_reg))
XEXP (orig_reg, 0) = XEXP (reg, 0);
}
else if (GET_CODE (reg) == CONCAT)

@@ -2157,7 +2157,7 @@ fixup_var_refs_1 (rtx var, enum machine_mode promoted_mode, rtx *loc, rtx insn,
means that the insn may have become invalid again. We can't
in this case make a new replacement since we already have one
and we must deal with MATCH_DUPs. */
-if (GET_CODE (replacement->new) == MEM)
+if (MEM_P (replacement->new))
{
INSN_CODE (insn) = -1;
if (recog_memoized (insn) >= 0)

@@ -2702,13 +2702,13 @@ optimize_bit_field (rtx body, rtx insn, rtx *equiv_mem)
/* Now check that the containing word is memory, not a register,
and that it is safe to change the machine mode. */
-if (GET_CODE (XEXP (bitfield, 0)) == MEM)
+if (MEM_P (XEXP (bitfield, 0)))
memref = XEXP (bitfield, 0);
else if (REG_P (XEXP (bitfield, 0))
&& equiv_mem != 0)
memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
-&& GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
+&& MEM_P (SUBREG_REG (XEXP (bitfield, 0))))
memref = SUBREG_REG (XEXP (bitfield, 0));
else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
&& equiv_mem != 0

@@ -2947,7 +2947,7 @@ flush_addressof (tree decl)
{
if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
&& DECL_RTL (decl) != 0
-&& GET_CODE (DECL_RTL (decl)) == MEM
+&& MEM_P (DECL_RTL (decl))
&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
&& REG_P (XEXP (XEXP (DECL_RTL (decl), 0), 0)))
put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);

@@ -3043,7 +3043,7 @@ purge_addressof_1 (rtx *loc, rtx insn, int force, int store, int may_postpone,
{
rtx sub, insns;
-if (GET_CODE (XEXP (x, 0)) != MEM)
+if (!MEM_P (XEXP (x, 0)))
put_addressof_into_stack (x, ht);
/* We must create a copy of the rtx because it was created by

@@ -3077,7 +3077,7 @@ purge_addressof_1 (rtx *loc, rtx insn, int force, int store, int may_postpone,
{
rtx sub = XEXP (XEXP (x, 0), 0);
-if (GET_CODE (sub) == MEM)
+if (MEM_P (sub))
sub = adjust_address_nv (sub, GET_MODE (x), 0);
else if (REG_P (sub)
&& (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))

@@ -3706,7 +3706,7 @@ instantiate_decl (rtx x, HOST_WIDE_INT size, int valid_only)
/* If this is not a MEM, no need to do anything. Similarly if the
address is a constant or a register that is not a virtual register. */
-if (x == 0 || GET_CODE (x) != MEM)
+if (x == 0 || !MEM_P (x))
return;
addr = XEXP (x, 0);

@@ -4129,7 +4129,7 @@ instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
go ahead and make the invalid one, but do it to a copy. For a REG,
just make the recursive call, since there's no chance of a problem. */
-if ((GET_CODE (XEXP (x, 0)) == MEM
+if ((MEM_P (XEXP (x, 0))
&& instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
0))
|| (REG_P (XEXP (x, 0))

@@ -4169,7 +4169,7 @@ instantiate_virtual_regs_1 (rtx *loc, rtx object, int extra_insns)
if (REG_P (XEXP (x, 0)))
return 1;
-else if (GET_CODE (XEXP (x, 0)) == MEM)
+else if (MEM_P (XEXP (x, 0)))
{
/* If we have a (addressof (mem ..)), do any instantiation inside
since we know we'll be making the inside valid when we finally

@@ -5106,7 +5106,7 @@ assign_parms (tree fndecl)
if (nominal_mode == passed_mode
&& ! did_conversion
&& stack_parm != 0
-&& GET_CODE (stack_parm) == MEM
+&& MEM_P (stack_parm)
&& locate.offset.var == 0
&& reg_mentioned_p (virtual_incoming_args_rtx,
XEXP (stack_parm, 0)))

@@ -5254,7 +5254,7 @@ assign_parms (tree fndecl)
/* Set MEM_EXPR to the original decl, i.e. to PARM,
instead of the copy of decl, i.e. FNARGS. */
if (DECL_INCOMING_RTL (parm)
-&& GET_CODE (DECL_INCOMING_RTL (parm)) == MEM)
+&& MEM_P (DECL_INCOMING_RTL (parm)))
set_mem_expr (DECL_INCOMING_RTL (parm), parm);
}
fnargs = TREE_CHAIN (fnargs);

@@ -5737,7 +5737,7 @@ setjmp_protect (tree block)
|| TREE_CODE (decl) == PARM_DECL)
&& DECL_RTL (decl) != 0
&& (REG_P (DECL_RTL (decl))
-|| (GET_CODE (DECL_RTL (decl)) == MEM
+|| (MEM_P (DECL_RTL (decl))
&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
/* If this variable came from an inline function, it must be
that its life doesn't overlap the setjmp. If there was a

@@ -5770,7 +5770,7 @@ setjmp_protect_args (void)
|| TREE_CODE (decl) == PARM_DECL)
&& DECL_RTL (decl) != 0
&& (REG_P (DECL_RTL (decl))
-|| (GET_CODE (DECL_RTL (decl)) == MEM
+|| (MEM_P (DECL_RTL (decl))
&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
&& (
/* If longjmp doesn't restore the registers,

@@ -5802,7 +5802,7 @@ fix_lexical_addr (rtx addr, tree var)
fp = find_function_data (context);
-if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
+if (GET_CODE (addr) == ADDRESSOF && MEM_P (XEXP (addr, 0)))
addr = XEXP (XEXP (addr, 0), 0);
/* Decode given address as base reg plus displacement. */

@@ -7050,10 +7050,10 @@ keep_stack_depressed (rtx insns)
insn = next;
continue;
}
-else if (GET_CODE (retaddr) == MEM
+else if (MEM_P (retaddr)
&& REG_P (XEXP (retaddr, 0)))
base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
-else if (GET_CODE (retaddr) == MEM
+else if (MEM_P (retaddr)
&& GET_CODE (XEXP (retaddr, 0)) == PLUS
&& REG_P (XEXP (XEXP (retaddr, 0), 0))
&& GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
gcc/ifcvt.c

@@ -1188,7 +1188,7 @@ noce_try_cmove_arith (struct noce_if_info *if_info)
early because it'll screw alias analysis. Note that we've
already checked for no side effects. */
if (! no_new_pseudos && cse_not_expected
-&& GET_CODE (a) == MEM && GET_CODE (b) == MEM
+&& MEM_P (a) && MEM_P (b)
&& BRANCH_COST >= 5)
{
a = XEXP (a, 0);

@@ -1629,7 +1629,7 @@ noce_try_abs (struct noce_if_info *if_info)
return FALSE;
c = XEXP (note, 0);
}
-if (GET_CODE (c) == MEM
+if (MEM_P (c)
&& GET_CODE (XEXP (c, 0)) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
c = get_pool_constant (XEXP (c, 0));

@@ -1833,7 +1833,7 @@ noce_operand_ok (rtx op)
{
/* We special-case memories, so handle any of them with
no address side effects. */
-if (GET_CODE (op) == MEM)
+if (MEM_P (op))
return ! side_effects_p (XEXP (op, 0));
if (side_effects_p (op))

@@ -2022,7 +2022,7 @@ noce_process_if_block (struct ce_if_block * ce_info)
for most optimizations if writing to x may trap, i.e. it's a memory
other than a static var or a stack slot. */
if (! set_b
-&& GET_CODE (orig_x) == MEM
+&& MEM_P (orig_x)
&& ! MEM_NOTRAP_P (orig_x)
&& rtx_addr_can_trap_p (XEXP (orig_x, 0)))
{

@@ -2997,7 +2997,7 @@ find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
static int
find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
{
-return GET_CODE (*px) == MEM;
+return MEM_P (*px);
}
/* Used by the code above to perform the actual rtl transformations.
gcc/integrate.c

@@ -763,7 +763,7 @@ subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
case CLOBBER:
/* The only thing we can do with a USE or CLOBBER is possibly do
some substitutions in a MEM within it. */
-if (GET_CODE (XEXP (x, 0)) == MEM)
+if (MEM_P (XEXP (x, 0)))
subst_constants (&XEXP (XEXP (x, 0), 0), insn, map, 0);
return;

@@ -864,7 +864,7 @@ subst_constants (rtx *loc, rtx insn, struct inline_remap *map, int memonly)
}
/* Do substitute in the address of a destination in memory. */
-if (GET_CODE (*dest_loc) == MEM)
+if (MEM_P (*dest_loc))
subst_constants (&XEXP (*dest_loc, 0), insn, map, 0);
/* Check for the case of DEST a SUBREG, both it and the underlying

@@ -1320,7 +1320,7 @@ allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
; /* Do nothing. */
-else if (GET_CODE (x) == MEM)
+else if (MEM_P (x))
reg_equiv_memory_loc[regno] = x;
else if (REG_P (x))
{
gcc/local-alloc.c

@@ -450,7 +450,7 @@ validate_equiv_mem_from_store (rtx dest, rtx set ATTRIBUTE_UNUSED,
{
if ((REG_P (dest)
&& reg_overlap_mentioned_p (dest, equiv_mem))
-|| (GET_CODE (dest) == MEM
+|| (MEM_P (dest)
&& true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
equiv_mem_modified = 1;
}

@@ -711,7 +711,7 @@ memref_referenced_p (rtx memref, rtx x)
case SET:
/* If we are setting a MEM, it doesn't count (its address does), but any
other SET_DEST that has a MEM in it is referencing the MEM. */
-if (GET_CODE (SET_DEST (x)) == MEM)
+if (MEM_P (SET_DEST (x)))
{
if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
return 1;

@@ -882,7 +882,7 @@ update_equiv_regs (void)
|| (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
|| reg_equiv[regno].init_insns == const0_rtx
|| (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
-&& GET_CODE (src) == MEM))
+&& MEM_P (src)))
{
/* This might be setting a SUBREG of a pseudo, a pseudo that is
also set somewhere else to a constant. */

@@ -940,7 +940,7 @@ update_equiv_regs (void)
note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
-&& GET_CODE (SET_SRC (set)) == MEM
+&& MEM_P (SET_SRC (set))
&& validate_equiv_mem (insn, dest, SET_SRC (set)))
REG_NOTES (insn) = note = gen_rtx_EXPR_LIST (REG_EQUIV, SET_SRC (set),
REG_NOTES (insn));
gcc/loop.c

@@ -825,7 +825,7 @@ scan_loop (struct loop *loop, int flags)
= gen_rtx_EXPR_LIST (VOIDmode, XEXP (x, 0),
dependencies);
else if (GET_CODE (x) == CLOBBER
-&& GET_CODE (XEXP (x, 0)) == MEM)
+&& MEM_P (XEXP (x, 0)))
dependencies = find_regs_nested (dependencies,
XEXP (XEXP (x, 0), 0));
}

@@ -2593,7 +2593,7 @@ prescan_loop (struct loop *loop)
rtx fusage = XEXP (fusage_entry, 0);
if (GET_CODE (fusage) == CLOBBER
-&& GET_CODE (XEXP (fusage, 0)) == MEM
+&& MEM_P (XEXP (fusage, 0))
&& RTX_UNCHANGING_P (XEXP (fusage, 0)))
{
note_stores (fusage, note_addr_stored, loop_info);

@@ -3233,7 +3233,7 @@ note_addr_stored (rtx x, rtx y ATTRIBUTE_UNUSED,
{
struct loop_info *loop_info = data;
-if (x == 0 || GET_CODE (x) != MEM)
+if (x == 0 || !MEM_P (x))
return;
/* Count number of memory writes.

@@ -3657,7 +3657,7 @@ check_store (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
struct check_store_data *d = (struct check_store_data *) data;
-if ((GET_CODE (x) == MEM) && rtx_equal_p (d->mem_address, XEXP (x, 0)))
+if ((MEM_P (x)) && rtx_equal_p (d->mem_address, XEXP (x, 0)))
d->mem_write = 1;
}

@@ -8960,7 +8960,7 @@ maybe_eliminate_biv_1 (const struct loop *loop, rtx x, rtx insn,
return 1;
}
}
-else if (REG_P (arg) || GET_CODE (arg) == MEM)
+else if (REG_P (arg) || MEM_P (arg))
{
if (loop_invariant_p (loop, arg) == 1)
{

@@ -10288,7 +10288,7 @@ try_swap_copy_prop (const struct loop *loop, rtx replacement,
static int
find_mem_in_note_1 (rtx *x, void *data)
{
-if (*x != NULL_RTX && GET_CODE (*x) == MEM)
+if (*x != NULL_RTX && MEM_P (*x))
{
rtx *res = (rtx *) data;
*res = *x;
gcc/optabs.c

@@ -2985,7 +2985,7 @@ expand_abs (enum machine_mode mode, rtx op0, rtx target,
op1 = gen_label_rtx ();
if (target == 0 || ! safe
|| GET_MODE (target) != mode
-|| (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
+|| (MEM_P (target) && MEM_VOLATILE_P (target))
|| (REG_P (target)
&& REGNO (target) < FIRST_PSEUDO_REGISTER))
target = gen_reg_rtx (mode);

@@ -3241,7 +3241,7 @@ emit_unop_insn (int icode, rtx target, rtx op0, enum rtx_code code)
op0 = copy_to_mode_reg (mode0, op0);
if (! (*insn_data[icode].operand[0].predicate) (temp, GET_MODE (temp))
-|| (flag_force_mem && GET_CODE (temp) == MEM))
+|| (flag_force_mem && MEM_P (temp)))
temp = gen_reg_rtx (GET_MODE (temp));
pat = GEN_FCN (icode) (temp, op0);
gcc/passes.c

@@ -440,7 +440,7 @@ rest_of_handle_final (void)
different from the DECL_NAME name used in the source file. */
x = DECL_RTL (current_function_decl);
-if (GET_CODE (x) != MEM)
+if (!MEM_P (x))
abort ();
x = XEXP (x, 0);
if (GET_CODE (x) != SYMBOL_REF)
gcc/postreload.c

@@ -232,7 +232,7 @@ reload_cse_simplify_set (rtx set, rtx insn)
that combine made wrt the contents of sign bits. We'll do this by
generating an extend instruction instead of a reg->reg copy. Thus
the destination must be a register that we can widen. */
-if (GET_CODE (src) == MEM
+if (MEM_P (src)
&& GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
&& (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != NIL
&& !REG_P (SET_DEST (set)))

@@ -244,7 +244,7 @@ reload_cse_simplify_set (rtx set, rtx insn)
return 0;
/* If memory loads are cheaper than register copies, don't change them. */
-if (GET_CODE (src) == MEM)
+if (MEM_P (src))
old_cost = MEMORY_MOVE_COST (GET_MODE (src), dclass, 1);
else if (REG_P (src))
old_cost = REGISTER_MOVE_COST (GET_MODE (src),

@@ -404,7 +404,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
op = recog_data.operand[i];
mode = GET_MODE (op);
#ifdef LOAD_EXTEND_OP
-if (GET_CODE (op) == MEM
+if (MEM_P (op)
&& GET_MODE_BITSIZE (mode) < BITS_PER_WORD
&& LOAD_EXTEND_OP (mode) != NIL)
{

@@ -418,7 +418,7 @@ reload_cse_simplify_operands (rtx insn, rtx testreg)
extension applies.
Also, if there is an explicit extension, we don't have to
worry about an implicit one. */
-else if (GET_CODE (SET_DEST (set)) == MEM
+else if (MEM_P (SET_DEST (set))
|| GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
|| GET_CODE (SET_SRC (set)) == ZERO_EXTEND
|| GET_CODE (SET_SRC (set)) == SIGN_EXTEND)

@@ -1432,7 +1432,7 @@ move2add_note_store (rtx dst, rtx set, void *data ATTRIBUTE_UNUSED)
/* Some targets do argument pushes without adding REG_INC notes. */
-if (GET_CODE (dst) == MEM)
+if (MEM_P (dst))
{
dst = XEXP (dst, 0);
if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
|
|
gcc/ra-build.c
@@ -2597,7 +2597,7 @@ detect_remat_webs (void)
 we created them ourself. They might not have set their
 unchanging flag set, but nevertheless they are stable across
 the livetime in question. */
-|| (GET_CODE (src) == MEM
+|| (MEM_P (src)
 && INSN_UID (insn) >= orig_max_uid
 && memref_is_stack_slot (src)))
 /* And we must be able to construct an insn without
gcc/ra-debug.c
@@ -955,10 +955,10 @@ dump_static_insn_cost (FILE *file, const char *message, const char *prefix)
 src = SUBREG_REG (src);
 if (GET_CODE (dest) == SUBREG)
 dest = SUBREG_REG (dest);
-if (GET_CODE (src) == MEM && GET_CODE (dest) != MEM
+if (MEM_P (src) && !MEM_P (dest)
 && memref_is_stack_slot (src))
 pcost = &load;
-else if (GET_CODE (src) != MEM && GET_CODE (dest) == MEM
+else if (!MEM_P (src) && MEM_P (dest)
 && memref_is_stack_slot (dest))
 pcost = &store;
 }
gcc/ra-rewrite.c
@@ -570,7 +570,7 @@ slots_overlap_p (rtx s1, rtx s2)
 return 0;
 return 1;
 }
-if (GET_CODE (s1) != MEM || GET_CODE (s2) != MEM)
+if (!MEM_P (s1) || GET_CODE (s2) != MEM)
 abort ();
 s1 = XEXP (s1, 0);
 s2 = XEXP (s2, 0);
@@ -722,7 +722,7 @@ insert_stores (bitmap new_deaths)
 slots = NULL;
 else
 {
-if (1 || GET_CODE (SET_SRC (set)) == MEM)
+if (1 || MEM_P (SET_SRC (set)))
 delete_overlapping_slots (&slots, SET_SRC (set));
 }
 }
gcc/recog.c
@@ -233,7 +233,7 @@ validate_change (rtx object, rtx *loc, rtx new, int in_group)
 changes[num_changes].loc = loc;
 changes[num_changes].old = old;

-if (object && GET_CODE (object) != MEM)
+if (object && !MEM_P (object))
 {
 /* Set INSN_CODE to force rerecognition of insn. Save old code in
 case invalid. */
@@ -338,7 +338,7 @@ apply_change_group (void)
 if (object == 0 || object == last_validated)
 continue;

-if (GET_CODE (object) == MEM)
+if (MEM_P (object))
 {
 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
 break;
@@ -433,7 +433,7 @@ cancel_changes (int num)
 for (i = num_changes - 1; i >= num; i--)
 {
 *changes[i].loc = changes[i].old;
-if (changes[i].object && GET_CODE (changes[i].object) != MEM)
+if (changes[i].object && !MEM_P (changes[i].object))
 INSN_CODE (changes[i].object) = changes[i].old_code;
 }
 num_changes = num;
@@ -586,7 +586,7 @@ validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
 likely to be an insertion operation; if it was, nothing bad will
 happen, we might just fail in some cases). */

-if (GET_CODE (XEXP (x, 0)) == MEM
+if (MEM_P (XEXP (x, 0))
 && GET_CODE (XEXP (x, 1)) == CONST_INT
 && GET_CODE (XEXP (x, 2)) == CONST_INT
 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
@@ -942,7 +942,7 @@ general_operand (rtx op, enum machine_mode mode)
 #ifdef INSN_SCHEDULING
 /* On machines that have insn scheduling, we want all memory
 reference to be explicit, so outlaw paradoxical SUBREGs. */
-if (GET_CODE (sub) == MEM
+if (MEM_P (sub)
 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
 return 0;
 #endif
@@ -953,7 +953,7 @@ general_operand (rtx op, enum machine_mode mode)

 ??? This is a kludge. */
 if (!reload_completed && SUBREG_BYTE (op) != 0
-&& GET_CODE (sub) == MEM)
+&& MEM_P (sub))
 return 0;

 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
@@ -1039,7 +1039,7 @@ register_operand (rtx op, enum machine_mode mode)
 (Ideally, (SUBREG (MEM)...) should not exist after reload,
 but currently it does result from (SUBREG (REG)...) where the
 reg went on the stack.) */
-if (! reload_completed && GET_CODE (sub) == MEM)
+if (! reload_completed && MEM_P (sub))
 return general_operand (op, mode);

 #ifdef CANNOT_CHANGE_MODE_CLASS
@@ -1202,7 +1202,7 @@ nonmemory_operand (rtx op, enum machine_mode mode)
 (Ideally, (SUBREG (MEM)...) should not exist after reload,
 but currently it does result from (SUBREG (REG)...) where the
 reg went on the stack.) */
-if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
+if (! reload_completed && MEM_P (SUBREG_REG (op)))
 return general_operand (op, mode);
 op = SUBREG_REG (op);
 }
@@ -1229,7 +1229,7 @@ push_operand (rtx op, enum machine_mode mode)
 rounded_size = PUSH_ROUNDING (rounded_size);
 #endif

-if (GET_CODE (op) != MEM)
+if (!MEM_P (op))
 return 0;

 if (mode != VOIDmode && GET_MODE (op) != mode)
@@ -1269,7 +1269,7 @@ push_operand (rtx op, enum machine_mode mode)
 int
 pop_operand (rtx op, enum machine_mode mode)
 {
-if (GET_CODE (op) != MEM)
+if (!MEM_P (op))
 return 0;

 if (mode != VOIDmode && GET_MODE (op) != mode)
@@ -1312,7 +1312,7 @@ memory_operand (rtx op, enum machine_mode mode)
 if (! reload_completed)
 /* Note that no SUBREG is a memory operand before end of reload pass,
 because (SUBREG (MEM...)) forces reloading into a register. */
-return GET_CODE (op) == MEM && general_operand (op, mode);
+return MEM_P (op) && general_operand (op, mode);

 if (mode != VOIDmode && GET_MODE (op) != mode)
 return 0;
@@ -1321,7 +1321,7 @@ memory_operand (rtx op, enum machine_mode mode)
 if (GET_CODE (inner) == SUBREG)
 inner = SUBREG_REG (inner);

-return (GET_CODE (inner) == MEM && general_operand (op, mode));
+return (MEM_P (inner) && general_operand (op, mode));
 }

 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
@@ -1332,7 +1332,7 @@ indirect_operand (rtx op, enum machine_mode mode)
 {
 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
 if (! reload_completed
-&& GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
+&& GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
 {
 int offset = SUBREG_BYTE (op);
 rtx inner = SUBREG_REG (op);
@@ -1352,7 +1352,7 @@ indirect_operand (rtx op, enum machine_mode mode)
 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
 }

-return (GET_CODE (op) == MEM
+return (MEM_P (op)
 && memory_operand (op, mode)
 && general_operand (XEXP (op, 0), Pmode));
 }
@@ -1653,7 +1653,7 @@ asm_operand_ok (rtx op, const char *constraint)

 Match any memory and hope things are resolved after reload. */

-if (GET_CODE (op) == MEM
+if (MEM_P (op)
 && (1
 || GET_CODE (XEXP (op, 0)) == PRE_DEC
 || GET_CODE (XEXP (op, 0)) == POST_DEC))
@@ -1661,7 +1661,7 @@ asm_operand_ok (rtx op, const char *constraint)
 break;

 case '>':
-if (GET_CODE (op) == MEM
+if (MEM_P (op)
 && (1
 || GET_CODE (XEXP (op, 0)) == PRE_INC
 || GET_CODE (XEXP (op, 0)) == POST_INC))
@@ -1856,7 +1856,7 @@ find_constant_term_loc (rtx *p)
 int
 offsettable_memref_p (rtx op)
 {
-return ((GET_CODE (op) == MEM)
+return ((MEM_P (op))
 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
 }

@@ -1866,7 +1866,7 @@ offsettable_memref_p (rtx op)
 int
 offsettable_nonstrict_memref_p (rtx op)
 {
-return ((GET_CODE (op) == MEM)
+return ((MEM_P (op))
 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
 }

@@ -2432,7 +2432,7 @@ constrain_operands (int strict)
 case 'm':
 /* Memory operands must be valid, to the extent
 required by STRICT. */
-if (GET_CODE (op) == MEM)
+if (MEM_P (op))
 {
 if (strict > 0
 && !strict_memory_address_p (GET_MODE (op),
@@ -2453,14 +2453,14 @@ constrain_operands (int strict)
 break;

 case '<':
-if (GET_CODE (op) == MEM
+if (MEM_P (op)
 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
 || GET_CODE (XEXP (op, 0)) == POST_DEC))
 win = 1;
 break;

 case '>':
-if (GET_CODE (op) == MEM
+if (MEM_P (op)
 && (GET_CODE (XEXP (op, 0)) == PRE_INC
 || GET_CODE (XEXP (op, 0)) == POST_INC))
 win = 1;
@@ -2512,10 +2512,10 @@ constrain_operands (int strict)
 break;

 case 'V':
-if (GET_CODE (op) == MEM
+if (MEM_P (op)
 && ((strict > 0 && ! offsettable_memref_p (op))
 || (strict < 0
-&& !(CONSTANT_P (op) || GET_CODE (op) == MEM))
+&& !(CONSTANT_P (op) || MEM_P (op)))
 || (reload_in_progress
 && !(REG_P (op)
 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
@@ -2527,7 +2527,7 @@ constrain_operands (int strict)
 || (strict == 0 && offsettable_nonstrict_memref_p (op))
 /* Before reload, accept what reload can handle. */
 || (strict < 0
-&& (CONSTANT_P (op) || GET_CODE (op) == MEM))
+&& (CONSTANT_P (op) || MEM_P (op)))
 /* During reload, accept a pseudo */
 || (reload_in_progress && REG_P (op)
 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
@@ -2557,7 +2557,7 @@ constrain_operands (int strict)

 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
 /* Every memory operand can be reloaded to fit. */
-&& ((strict < 0 && GET_CODE (op) == MEM)
+&& ((strict < 0 && MEM_P (op))
 /* Before reload, accept what reload can turn
 into mem. */
 || (strict < 0 && CONSTANT_P (op))
@@ -2598,7 +2598,7 @@ constrain_operands (int strict)
 if (earlyclobber[eopno]
 && REG_P (recog_data.operand[eopno]))
 for (opno = 0; opno < recog_data.n_operands; opno++)
-if ((GET_CODE (recog_data.operand[opno]) == MEM
+if ((MEM_P (recog_data.operand[opno])
 || recog_data.operand_type[opno] != OP_OUT)
 && opno != eopno
 /* Ignore things like match_operator operands. */
@@ -3309,7 +3309,7 @@ store_data_bypass_p (rtx out_insn, rtx in_insn)
 if (! in_set)
 abort ();

-if (GET_CODE (SET_DEST (in_set)) != MEM)
+if (!MEM_P (SET_DEST (in_set)))
 return false;

 out_set = single_set (out_insn);
gcc/reg-stack.c
@@ -1530,7 +1530,7 @@ subst_stack_regs_pat (rtx insn, stack regstack, rtx pat)
 /* See if this is a `movM' pattern, and handle elsewhere if so. */
 if (STACK_REG_P (*src)
 || (STACK_REG_P (*dest)
-&& (REG_P (*src) || GET_CODE (*src) == MEM
+&& (REG_P (*src) || MEM_P (*src)
 || GET_CODE (*src) == CONST_DOUBLE)))
 {
 control_flow_insn_deleted |= move_for_stack_reg (insn, regstack, pat);
gcc/regclass.c
@@ -967,7 +967,7 @@ record_operand_costs (rtx insn, struct costs *op_costs,
 if (GET_CODE (recog_data.operand[i]) == SUBREG)
 recog_data.operand[i] = SUBREG_REG (recog_data.operand[i]);

-if (GET_CODE (recog_data.operand[i]) == MEM)
+if (MEM_P (recog_data.operand[i]))
 record_address_regs (XEXP (recog_data.operand[i], 0),
 MODE_BASE_REG_CLASS (modes[i]), frequency * 2);
 else if (constraints[i][0] == 'p'
@@ -1038,10 +1038,10 @@ scan_one_insn (rtx insn, int pass)
 parameter is stored in memory. Record this fact. */

 if (set != 0 && REG_P (SET_DEST (set))
-&& GET_CODE (SET_SRC (set)) == MEM
+&& MEM_P (SET_SRC (set))
 && (note = find_reg_note (insn, REG_EQUIV,
 NULL_RTX)) != 0
-&& GET_CODE (XEXP (note, 0)) == MEM)
+&& MEM_P (XEXP (note, 0)))
 {
 costs[REGNO (SET_DEST (set))].mem_cost
 -= (MEMORY_MOVE_COST (GET_MODE (SET_DEST (set)),
@@ -1570,19 +1570,19 @@ record_reg_classes (int n_alts, int n_ops, rtx *ops,
 /* It doesn't seem worth distinguishing between offsettable
 and non-offsettable addresses here. */
 allows_mem[i] = 1;
-if (GET_CODE (op) == MEM)
+if (MEM_P (op))
 win = 1;
 break;

 case '<':
-if (GET_CODE (op) == MEM
+if (MEM_P (op)
 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
 || GET_CODE (XEXP (op, 0)) == POST_DEC))
 win = 1;
 break;

 case '>':
-if (GET_CODE (op) == MEM
+if (MEM_P (op)
 && (GET_CODE (XEXP (op, 0)) == PRE_INC
 || GET_CODE (XEXP (op, 0)) == POST_INC))
 win = 1;
@@ -1643,7 +1643,7 @@ record_reg_classes (int n_alts, int n_ops, rtx *ops,
 break;

 case 'g':
-if (GET_CODE (op) == MEM
+if (MEM_P (op)
 || (CONSTANT_P (op)
 #ifdef LEGITIMATE_PIC_OPERAND_P
 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
@@ -1669,7 +1669,7 @@ record_reg_classes (int n_alts, int n_ops, rtx *ops,
 {
 /* Every MEM can be reloaded to fit. */
 allows_mem[i] = 1;
-if (GET_CODE (op) == MEM)
+if (MEM_P (op))
 win = 1;
 }
 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
@@ -1909,7 +1909,7 @@ copy_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
 cost to move between the register classes, and use 2 for everything
 else (constants). */

-if (GET_CODE (x) == MEM || class == NO_REGS)
+if (MEM_P (x) || class == NO_REGS)
 return MEMORY_MOVE_COST (mode, class, to_p);

 else if (REG_P (x))
@@ -2418,7 +2418,7 @@ reg_scan_mark_refs (rtx x, rtx insn, int note_flag, unsigned int min_regno)
 REG_N_SETS (REGNO (reg))++;
 REG_N_REFS (REGNO (reg))++;
 }
-else if (GET_CODE (reg) == MEM)
+else if (MEM_P (reg))
 reg_scan_mark_refs (XEXP (reg, 0), insn, note_flag, min_regno);
 }
 break;
@@ -2505,7 +2505,7 @@ reg_scan_mark_refs (rtx x, rtx insn, int note_flag, unsigned int min_regno)

 if (!REG_ATTRS (dest) && REG_P (src))
 REG_ATTRS (dest) = REG_ATTRS (src);
-if (!REG_ATTRS (dest) && GET_CODE (src) == MEM)
+if (!REG_ATTRS (dest) && MEM_P (src))
 set_reg_attrs_from_mem (dest, src);
 }
gcc/regmove.c
@@ -671,7 +671,7 @@ optimize_reg_copy_3 (rtx insn, rtx dest, rtx src)
 return;

 if (! (set = single_set (p))
-|| GET_CODE (SET_SRC (set)) != MEM
+|| !MEM_P (SET_SRC (set))
 /* If there's a REG_EQUIV note, this must be an insn that loads an
 argument. Prefer keeping the note over doing this optimization. */
 || find_reg_note (p, REG_EQUIV, NULL_RTX)
@@ -2134,7 +2134,7 @@ combine_stack_adjustments (void)
 static int
 stack_memref_p (rtx x)
 {
-if (GET_CODE (x) != MEM)
+if (!MEM_P (x))
 return 0;
 x = XEXP (x, 0);

@@ -2411,7 +2411,7 @@ combine_stack_adjustments_for_block (basic_block bb)
 turn it into a direct store. Obviously we can't do this if
 there were any intervening uses of the stack pointer. */
 if (memlist == NULL
-&& GET_CODE (dest) == MEM
+&& MEM_P (dest)
 && ((GET_CODE (XEXP (dest, 0)) == PRE_DEC
 && (last_sp_adjust
 == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest))))
gcc/regrename.c
@@ -1676,11 +1676,11 @@ copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
 = replace_oldest_value_reg (recog_data.operand_loc[i],
 recog_op_alt[i][alt].class,
 insn, vd);
-else if (GET_CODE (recog_data.operand[i]) == MEM)
+else if (MEM_P (recog_data.operand[i]))
 replaced = replace_oldest_value_mem (recog_data.operand[i],
 insn, vd);
 }
-else if (GET_CODE (recog_data.operand[i]) == MEM)
+else if (MEM_P (recog_data.operand[i]))
 replaced = replace_oldest_value_mem (recog_data.operand[i],
 insn, vd);
gcc/reload.c
@@ -862,7 +862,7 @@ can_reload_into (rtx in, int regno, enum machine_mode mode)
 that are already scheduled, which can become quite complicated.
 And since we've already handled address reloads for this MEM, it
 should always succeed anyway. */
-if (GET_CODE (in) == MEM)
+if (MEM_P (in))
 return 1;

 /* If we can make a simple SET insn that does the job, everything should
@@ -961,7 +961,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,

 /* If we have a read-write operand with an address side-effect,
 change either IN or OUT so the side-effect happens only once. */
-if (in != 0 && out != 0 && GET_CODE (in) == MEM && rtx_equal_p (in, out))
+if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
 switch (GET_CODE (XEXP (in, 0)))
 {
 case POST_INC: case POST_DEC: case POST_MODIFY:
@@ -1016,7 +1016,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 || strict_low
 || (((REG_P (SUBREG_REG (in))
 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
-|| GET_CODE (SUBREG_REG (in)) == MEM)
+|| MEM_P (SUBREG_REG (in)))
 && ((GET_MODE_SIZE (inmode)
 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
 #ifdef LOAD_EXTEND_OP
@@ -1068,7 +1068,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 inloc = &SUBREG_REG (in);
 in = *inloc;
 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
-if (GET_CODE (in) == MEM)
+if (MEM_P (in))
 /* This is supposed to happen only for paradoxical subregs made by
 combine.c. (SUBREG (MEM)) isn't supposed to occur other ways. */
 if (GET_MODE_SIZE (GET_MODE (in)) > GET_MODE_SIZE (inmode))
@@ -1125,7 +1125,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 || strict_low
 || (((REG_P (SUBREG_REG (out))
 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
-|| GET_CODE (SUBREG_REG (out)) == MEM)
+|| MEM_P (SUBREG_REG (out)))
 && ((GET_MODE_SIZE (outmode)
 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
 #ifdef WORD_REGISTER_OPERATIONS
@@ -1166,7 +1166,7 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 outloc = &SUBREG_REG (out);
 out = *outloc;
 #if ! defined (LOAD_EXTEND_OP) && ! defined (WORD_REGISTER_OPERATIONS)
-if (GET_CODE (out) == MEM
+if (MEM_P (out)
 && GET_MODE_SIZE (GET_MODE (out)) > GET_MODE_SIZE (outmode))
 abort ();
 #endif
@@ -1201,8 +1201,8 @@ push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
 }

 /* If IN appears in OUT, we can't share any input-only reload for IN. */
-if (in != 0 && out != 0 && GET_CODE (out) == MEM
-&& (REG_P (in) || GET_CODE (in) == MEM)
+if (in != 0 && out != 0 && MEM_P (out)
+&& (REG_P (in) || MEM_P (in))
 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
 dont_share = 1;

@@ -2268,7 +2268,7 @@ decompose (rtx x)

 memset (&val, 0, sizeof (val));

-if (GET_CODE (x) == MEM)
+if (MEM_P (x))
 {
 rtx base = NULL_RTX, offset = 0;
 rtx addr = XEXP (x, 0);
@@ -2407,10 +2407,10 @@ immune_p (rtx x, rtx y, struct decomposition ydata)
 if (ydata.safe)
 return 1;

-if (GET_CODE (y) != MEM)
+if (!MEM_P (y))
 abort ();
 /* If Y is memory and X is not, Y can't affect X. */
-if (GET_CODE (x) != MEM)
+if (!MEM_P (x))
 return 1;

 xdata = decompose (x);
@@ -2761,7 +2761,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 wider reload. */

 if (replace
-&& GET_CODE (op) == MEM
+&& MEM_P (op)
 && REG_P (reg)
 && (GET_MODE_SIZE (GET_MODE (reg))
 >= GET_MODE_SIZE (GET_MODE (op))))
@@ -2955,7 +2955,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 ??? When is it right at this stage to have a subreg
 of a mem that is _not_ to be handled specially? IMO
 those should have been reduced to just a mem. */
-|| ((GET_CODE (operand) == MEM
+|| ((MEM_P (operand)
 || (REG_P (operand)
 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
 #ifndef WORD_REGISTER_OPERATIONS
@@ -3066,7 +3066,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 offsettable address was expected, then we must reject
 this combination, because we can't reload it. */
 if (this_alternative_offmemok[m]
-&& GET_CODE (recog_data.operand[m]) == MEM
+&& MEM_P (recog_data.operand[m])
 && this_alternative[m] == (int) NO_REGS
 && ! this_alternative_win[m])
 bad = 1;
@@ -3125,7 +3125,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 case 'm':
 if (force_reload)
 break;
-if (GET_CODE (operand) == MEM
+if (MEM_P (operand)
 || (REG_P (operand)
 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
 && reg_renumber[REGNO (operand)] < 0))
@@ -3136,7 +3136,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 break;

 case '<':
-if (GET_CODE (operand) == MEM
+if (MEM_P (operand)
 && ! address_reloaded[i]
 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
@@ -3144,7 +3144,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 break;

 case '>':
-if (GET_CODE (operand) == MEM
+if (MEM_P (operand)
 && ! address_reloaded[i]
 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
 || GET_CODE (XEXP (operand, 0)) == POST_INC))
@@ -3155,7 +3155,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 case 'V':
 if (force_reload)
 break;
-if (GET_CODE (operand) == MEM
+if (MEM_P (operand)
 && ! (ind_levels ? offsettable_memref_p (operand)
 : offsettable_nonstrict_memref_p (operand))
 /* Certain mem addresses will become offsettable
@@ -3172,7 +3172,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 case 'o':
 if (force_reload)
 break;
-if ((GET_CODE (operand) == MEM
+if ((MEM_P (operand)
 /* If IND_LEVELS, find_reloads_address won't reload a
 pseudo that didn't get a hard reg, so we have to
 reject that case. */
@@ -3193,7 +3193,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 || (reg_equiv_address[REGNO (operand)] != 0))))
 win = 1;
 if (CONST_POOL_OK_P (operand)
-|| GET_CODE (operand) == MEM)
+|| MEM_P (operand))
 badop = 0;
 constmemok = 1;
 offmemok = 1;
@@ -3295,7 +3295,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 win = 1;
 /* If the address was already reloaded,
 we win as well. */
-else if (GET_CODE (operand) == MEM
+else if (MEM_P (operand)
 && address_reloaded[i])
 win = 1;
 /* Likewise if the address will be reloaded because
@@ -3313,7 +3313,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 constants via force_const_mem, and other
 MEMs by reloading the address like for 'o'. */
 if (CONST_POOL_OK_P (operand)
-|| GET_CODE (operand) == MEM)
+|| MEM_P (operand))
 badop = 0;
 constmemok = 1;
 offmemok = 1;
@@ -3513,7 +3513,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,

 for (j = 0; j < noperands; j++)
 /* Is this an input operand or a memory ref? */
-if ((GET_CODE (recog_data.operand[j]) == MEM
+if ((MEM_P (recog_data.operand[j])
 || modified[j] != RELOAD_WRITE)
 && j != i
 /* Ignore things like match_operator operands. */
@@ -3801,7 +3801,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 so we don't bother with it. It may not be worth doing. */
 else if (goal_alternative_matched[i] == -1
 && goal_alternative_offmemok[i]
-&& GET_CODE (recog_data.operand[i]) == MEM)
+&& MEM_P (recog_data.operand[i]))
 {
 operand_reloadnum[i]
 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
@@ -3907,7 +3907,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,

 while (GET_CODE (operand) == SUBREG)
 operand = SUBREG_REG (operand);
-if ((GET_CODE (operand) == MEM
+if ((MEM_P (operand)
 || (REG_P (operand)
 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
 /* If this is only for an output, the optional reload would not
@@ -3948,7 +3948,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,
 we then need to emit a USE and/or a CLOBBER so that reload
 inheritance will do the right thing. */
 else if (replace
-&& (GET_CODE (operand) == MEM
+&& (MEM_P (operand)
 || (REG_P (operand)
 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
 && reg_renumber [REGNO (operand)] < 0)))
@@ -3984,7 +3984,7 @@ find_reloads (rtx insn, int replace, int ind_levels, int live_known,

 while (GET_CODE (operand) == SUBREG)
 operand = SUBREG_REG (operand);
-if ((GET_CODE (operand) == MEM
+if ((MEM_P (operand)
 || (REG_P (operand)
 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
 && ((enum reg_class) goal_alternative[goal_alternative_matches[i]]
@@ -4796,7 +4796,7 @@ find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,
 frame and stack pointers is not its initial value. In that case the
 pseudo will have been replaced by a MEM referring to the
 stack pointer. */
-if (GET_CODE (ad) == MEM)
+if (MEM_P (ad))
 {
 /* First ensure that the address in this MEM is valid. Then, unless
 indirect addresses are valid, reload the MEM into a register. */
@@ -4822,7 +4822,7 @@ find_reloads_address (enum machine_mode mode, rtx *memrefloc, rtx ad,

 if (ind_levels == 0
 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
-|| GET_CODE (XEXP (tem, 0)) == MEM
+|| MEM_P (XEXP (tem, 0))
 || ! (REG_P (XEXP (tem, 0))
 || (GET_CODE (XEXP (tem, 0)) == PLUS
 && REG_P (XEXP (XEXP (tem, 0), 0))
@@ -5524,7 +5524,7 @@ find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
 memory location, since this will make it harder to
 reuse address reloads, and increases register pressure.
 Also don't do this if we can probably update x directly. */
-rtx equiv = (GET_CODE (XEXP (x, 0)) == MEM
+rtx equiv = (MEM_P (XEXP (x, 0))
 ? XEXP (x, 0)
 : reg_equiv_mem[regno]);
 int icode = (int) add_optab->handlers[(int) Pmode].insn_code;
@@ -5573,7 +5573,7 @@ find_reloads_address_1 (enum machine_mode mode, rtx x, int context,
 return value;
 }

-else if (GET_CODE (XEXP (x, 0)) == MEM)
+else if (MEM_P (XEXP (x, 0)))
 {
 /* This is probably the result of a substitution, by eliminate_regs,
 of an equivalent address for a pseudo that was not allocated to a
@@ -6311,7 +6311,7 @@ reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
 abort ();
 }
 }
-else if (GET_CODE (x) == MEM)
+else if (MEM_P (x))
 return refers_to_mem_for_reload_p (in);
 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
 || GET_CODE (x) == CC0)
@@ -6323,7 +6323,7 @@ reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
 (plus (sp) (const_int 64)), since that can lead to incorrect reload
 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
-while (GET_CODE (in) == MEM)
+while (MEM_P (in))
 in = XEXP (in, 0);
 if (REG_P (in))
 return 0;
@@ -6351,7 +6351,7 @@ refers_to_mem_for_reload_p (rtx x)
 const char *fmt;
 int i;

-if (GET_CODE (x) == MEM)
+if (MEM_P (x))
 return 1;

 if (REG_P (x))
@@ -6361,7 +6361,7 @@ refers_to_mem_for_reload_p (rtx x)
 fmt = GET_RTX_FORMAT (GET_CODE (x));
 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
 if (fmt[i] == 'e'
-&& (GET_CODE (XEXP (x, i)) == MEM
+&& (MEM_P (XEXP (x, i))
 || refers_to_mem_for_reload_p (XEXP (x, i))))
 return 1;

@@ -6416,7 +6416,7 @@ find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
 regno = goalreg;
 else if (REG_P (goal))
 regno = REGNO (goal);
-else if (GET_CODE (goal) == MEM)
+else if (MEM_P (goal))
 {
 enum rtx_code code = GET_CODE (XEXP (goal, 0));
 if (MEM_VOLATILE_P (goal))
@@ -6719,10 +6719,10 @@ find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
 return 0;
 }
-else if (goal_mem && GET_CODE (dest) == MEM
+else if (goal_mem && MEM_P (dest)
 && ! push_operand (dest, GET_MODE (dest)))
 return 0;
-else if (GET_CODE (dest) == MEM && regno >= FIRST_PSEUDO_REGISTER
+else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
 && reg_equiv_memory_loc[regno] != 0)
 return 0;
 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
@@ -6765,10 +6765,10 @@ find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
 return 0;
 }
-else if (goal_mem && GET_CODE (dest) == MEM
+else if (goal_mem && MEM_P (dest)
 && ! push_operand (dest, GET_MODE (dest)))
 return 0;
-else if (GET_CODE (dest) == MEM && regno >= FIRST_PSEUDO_REGISTER
+else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
 && reg_equiv_memory_loc[regno] != 0)
 return 0;
 else if (need_stable_sp
@@ -6808,7 +6808,7 @@ find_equiv_reg (rtx goal, rtx insn, enum reg_class class, int other,
 return 0;
 }

-else if (goal_mem && GET_CODE (dest) == MEM
+else if (goal_mem && MEM_P (dest)
 && ! push_operand (dest, GET_MODE (dest)))
 return 0;
 else if (need_stable_sp
gcc/reload1.c
@@ -793,7 +793,7 @@ reload (rtx first, int global)
 and the MEM is not SET_SRC, the equivalencing insn
 is one with the MEM as a SET_DEST and it occurs later.
 So don't mark this insn now. */
-if (GET_CODE (x) != MEM
+if (!MEM_P (x)
 || rtx_equal_p (SET_SRC (set), x))
 reg_equiv_init[i]
 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
@@ -803,7 +803,7 @@ reload (rtx first, int global)

 /* If this insn is setting a MEM from a register equivalent to it,
 this is the equivalencing insn. */
-else if (set && GET_CODE (SET_DEST (set)) == MEM
+else if (set && MEM_P (SET_DEST (set))
 && REG_P (SET_SRC (set))
 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
 && rtx_equal_p (SET_DEST (set),
@@ -1171,7 +1171,7 @@ reload (rtx first, int global)
 && (GET_MODE (insn) == QImode
 || find_reg_note (insn, REG_EQUAL, NULL_RTX)))
 || (GET_CODE (PATTERN (insn)) == CLOBBER
-&& (GET_CODE (XEXP (PATTERN (insn), 0)) != MEM
+&& (!MEM_P (XEXP (PATTERN (insn), 0))
 || GET_MODE (XEXP (PATTERN (insn), 0)) != BLKmode
 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0)) != SCRATCH
 && XEXP (XEXP (PATTERN (insn), 0), 0)
@@ -2287,7 +2287,7 @@ eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
 removed after CSE. */
 new = eliminate_regs (XEXP (x, 0), 0, insn);
-if (GET_CODE (new) == MEM)
+if (MEM_P (new))
 return XEXP (new, 0);
 return x;

@@ -2529,7 +2529,7 @@ eliminate_regs (rtx x, enum machine_mode mem_mode, rtx insn)
 int x_size = GET_MODE_SIZE (GET_MODE (x));
 int new_size = GET_MODE_SIZE (GET_MODE (new));

-if (GET_CODE (new) == MEM
+if (MEM_P (new)
 && ((x_size < new_size
 #ifdef WORD_REGISTER_OPERATIONS
 /* On these machines, combine can create rtl of the form
@@ -3128,7 +3128,7 @@ eliminate_regs_in_insn (rtx insn, int replace)
 insn, write a CLOBBER insn. */
 if (recog_data.operand_type[i] != OP_IN
 && REG_P (orig_operand[i])
-&& GET_CODE (substed_operand[i]) == MEM
+&& MEM_P (substed_operand[i])
 && replace)
 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
 insn);
@@ -3189,9 +3189,9 @@ eliminate_regs_in_insn (rtx insn, int replace)
 the MEM in recog_data.operand to the one in the insn.
 If they are not equal, then rerecognize the insn. */
 || (old_set != 0
-&& ((GET_CODE (SET_SRC (old_set)) == MEM
+&& ((MEM_P (SET_SRC (old_set))
 && SET_SRC (old_set) != recog_data.operand[1])
-|| (GET_CODE (SET_DEST (old_set)) == MEM
+|| (MEM_P (SET_DEST (old_set))
 && SET_DEST (old_set) != recog_data.operand[0])))
 /* If this was an add insn before, rerecognize. */
 || GET_CODE (SET_SRC (old_set)) == PLUS))
@@ -3821,7 +3821,7 @@ reload_as_needed (int live_known)

 if ((GET_CODE (PATTERN (insn)) == USE
 || GET_CODE (PATTERN (insn)) == CLOBBER)
-&& GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
+&& MEM_P (XEXP (PATTERN (insn), 0)))
 XEXP (XEXP (PATTERN (insn), 0), 0)
 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
 GET_MODE (XEXP (PATTERN (insn), 0)),
@@ -5354,7 +5354,7 @@ choose_reload_regs (struct insn_chain *chain)
 if (rld[r].in != 0 && rld[r].reg_rtx != 0
 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
-&& GET_CODE (rld[r].in) != MEM
+&& !MEM_P (rld[r].in)
 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
 continue;

@@ -5592,7 +5592,7 @@ choose_reload_regs (struct insn_chain *chain)
 && (CONSTANT_P (rld[r].in)
 || GET_CODE (rld[r].in) == PLUS
 || REG_P (rld[r].in)
-|| GET_CODE (rld[r].in) == MEM)
+|| MEM_P (rld[r].in))
 && (rld[r].nregs == max_group_size
 || ! reg_classes_intersect_p (rld[r].class, group_class)))
 search_equiv = rld[r].in;
@@ -6205,7 +6205,7 @@ emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
 because we will use this equiv reg right away. */

 if (oldequiv == 0 && optimize
-&& (GET_CODE (old) == MEM
+&& (MEM_P (old)
 || (REG_P (old)
 && REGNO (old) >= FIRST_PSEUDO_REGISTER
 && reg_renumber[REGNO (old)] < 0)))
@@ -6837,7 +6837,7 @@ static void
 do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
 {
 rtx insn = chain->insn;
-rtx old = (rl->in && GET_CODE (rl->in) == MEM
+rtx old = (rl->in && MEM_P (rl->in)
 ? rl->in_reg : rl->in);

 if (old != 0
@@ -6852,8 +6852,8 @@ do_input_reload (struct insn_chain *chain, struct reload *rl, int j)
 e.g. inheriting a SImode output reload for
 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
 if (optimize && reload_inherited[j] && rl->in
-&& GET_CODE (rl->in) == MEM
-&& GET_CODE (rl->in_reg) == MEM
+&& MEM_P (rl->in)
+&& MEM_P (rl->in_reg)
 && reload_spill_index[j] >= 0
 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
@@ -7283,7 +7283,7 @@ emit_reload_insns (struct insn_chain *chain)
 it thinks only about the original insn. So invalidate it here. */
 if (i < 0 && rld[r].out != 0
 && (REG_P (rld[r].out)
-|| (GET_CODE (rld[r].out) == MEM
+|| (MEM_P (rld[r].out)
 && REG_P (rld[r].out_reg))))
 {
 rtx out = (REG_P (rld[r].out)
@@ -7426,11 +7426,11 @@ gen_reload (rtx out, rtx in, int opnum, enum reload_type type)
 if (GET_CODE (in) == PLUS
 && (REG_P (XEXP (in, 0))
 || GET_CODE (XEXP (in, 0)) == SUBREG
-|| GET_CODE (XEXP (in, 0)) == MEM)
+|| MEM_P (XEXP (in, 0)))
 && (REG_P (XEXP (in, 1))
 || GET_CODE (XEXP (in, 1)) == SUBREG
 || CONSTANT_P (XEXP (in, 1))
-|| GET_CODE (XEXP (in, 1)) == MEM))
+|| MEM_P (XEXP (in, 1))))
 {
 /* We need to compute the sum of a register or a MEM and another
 register, constant, or MEM, and put it into the reload
@@ -7497,7 +7497,7 @@ gen_reload (rtx out, rtx in, int opnum, enum reload_type type)

 code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;

-if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
+if (CONSTANT_P (op1) || MEM_P (op1) || GET_CODE (op1) == SUBREG
 || (REG_P (op1)
 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
 || (code != CODE_FOR_nothing
@@ -7627,7 +7627,7 @@ delete_output_reload (rtx insn, int j, int last_reload_reg)
 rtx reg2 = rld[k].in;
 if (! reg2)
 continue;
-if (GET_CODE (reg2) == MEM || reload_override_in[k])
+if (MEM_P (reg2) || reload_override_in[k])
 reg2 = rld[k].in_reg;
 #ifdef AUTO_INC_DEC
 if (rld[k].out && ! rld[k].out_reg)
@@ -7773,7 +7773,7 @@ delete_address_reloads (rtx dead_insn, rtx current_insn)
 if (set)
 {
 rtx dst = SET_DEST (set);
-if (GET_CODE (dst) == MEM)
+if (MEM_P (dst))
 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
 }
 /* If we deleted the store from a reloaded post_{in,de}c expression,
gcc/resource.c
@@ -315,7 +315,7 @@ mark_referenced_resources (rtx x, struct resources *res,
 mark_referenced_resources (x, res, 0);
 else if (GET_CODE (x) == SUBREG)
 x = SUBREG_REG (x);
-if (GET_CODE (x) == MEM)
+if (MEM_P (x))
 mark_referenced_resources (XEXP (x, 0), res, 0);
 return;
gcc/rtlanal.c
@@ -534,7 +534,7 @@ get_jump_table_offset (rtx insn, rtx *earliest)
 }

 /* If X isn't a MEM then this isn't a tablejump we understand. */
-if (GET_CODE (x) != MEM)
+if (!MEM_P (x))
 return NULL_RTX;

 /* Strip off the MEM. */
@@ -688,7 +688,7 @@ count_occurrences (rtx x, rtx find, int count_dest)
 return 0;

 case MEM:
-if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
+if (MEM_P (find) && rtx_equal_p (x, find))
 return 1;
 break;

@@ -898,7 +898,7 @@ reg_referenced_p (rtx x, rtx body)
 return 0;

 case CLOBBER:
-if (GET_CODE (XEXP (body, 0)) == MEM)
+if (MEM_P (XEXP (body, 0)))
 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
 return 1;
 return 0;
@@ -968,7 +968,7 @@ reg_set_p (rtx reg, rtx insn)
 information holds all clobbered registers. */
 && ((REG_P (reg)
 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
-|| GET_CODE (reg) == MEM
+|| MEM_P (reg)
 || find_reg_fusage (insn, CLOBBER, reg)))))
 return 1;

@@ -1186,7 +1186,7 @@ set_of_1 (rtx x, rtx pat, void *data1)
 {
 struct set_of_data *data = (struct set_of_data *) (data1);
 if (rtx_equal_p (x, data->pat)
-|| (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
+|| (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
 data->found = pat;
 }

@@ -1299,7 +1299,7 @@ set_noop_p (rtx set)
 if (dst == pc_rtx && src == pc_rtx)
 return 1;

-if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
+if (MEM_P (dst) && MEM_P (src))
 return rtx_equal_p (dst, src) && !side_effects_p (dst);

 if (GET_CODE (dst) == SIGN_EXTRACT
@@ -1573,7 +1573,7 @@ reg_overlap_mentioned_p (rtx x, rtx in)
 const char *fmt;
 int i;

-if (GET_CODE (in) == MEM)
+if (MEM_P (in))
 return 1;

 fmt = GET_RTX_FORMAT (GET_CODE (in));
@@ -1708,7 +1708,7 @@ note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
 return;

 case CLOBBER:
-if (GET_CODE (XEXP (body, 0)) == MEM)
+if (MEM_P (XEXP (body, 0)))
 (*fun) (&XEXP (XEXP (body, 0), 0), data);
 return;

@@ -1729,7 +1729,7 @@ note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
 dest = XEXP (dest, 0);

-if (GET_CODE (dest) == MEM)
+if (MEM_P (dest))
 (*fun) (&XEXP (dest, 0), data);
 }
 return;
@@ -2033,7 +2033,7 @@ pure_call_p (rtx insn)
 rtx u, m;

 if (GET_CODE (u = XEXP (link, 0)) == USE
-&& GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
+&& MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
 && GET_CODE (XEXP (m, 0)) == SCRATCH)
 return 1;
 }
@@ -2648,7 +2648,7 @@ replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
 if (replace_dest)
 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);

-else if (GET_CODE (SET_DEST (x)) == MEM
+else if (MEM_P (SET_DEST (x))
 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
 /* Even if we are not to replace destinations, replace register if it
 is CONTAINED in destination (destination is memory or
@@ -4266,7 +4266,7 @@ nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
 != 0))
 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
-|| GET_CODE (SUBREG_REG (x)) != MEM)
+|| !MEM_P (SUBREG_REG (x)))
 #endif
 {
 /* On many CISC machines, accessing an object in a wider mode
@@ -4575,7 +4575,7 @@ num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
 if ((GET_MODE_SIZE (GET_MODE (x))
 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
-&& GET_CODE (SUBREG_REG (x)) == MEM)
+&& MEM_P (SUBREG_REG (x)))
 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
 known_x, known_mode, known_ret);
 #endif
gcc/rtlhooks.c
@@ -51,7 +51,7 @@ gen_lowpart_general (enum machine_mode mode, rtx x)
 abort ();
 return result;
 }
-else if (GET_CODE (x) == MEM)
+else if (MEM_P (x))
 {
 /* The only additional case we can do is MEM. */
 int offset = 0;
gcc/sched-deps.c
@@ -526,7 +526,7 @@ sched_analyze_1 (struct deps *deps, rtx x, rtx insn)
 if (!reload_completed && get_reg_known_equiv_p (regno))
 {
 rtx t = get_reg_known_value (regno);
-if (GET_CODE (t) == MEM)
+if (MEM_P (t))
 sched_analyze_2 (deps, XEXP (t, 0), insn);
 }

@@ -536,7 +536,7 @@ sched_analyze_1 (struct deps *deps, rtx x, rtx insn)
 add_dependence_list (insn, deps->last_function_call, REG_DEP_ANTI);
 }
 }
-else if (GET_CODE (dest) == MEM)
+else if (MEM_P (dest))
 {
 /* Writing memory. */
 rtx t = dest;
@@ -664,7 +664,7 @@ sched_analyze_2 (struct deps *deps, rtx x, rtx insn)
 if (!reload_completed && get_reg_known_equiv_p (regno))
 {
 rtx t = get_reg_known_value (regno);
-if (GET_CODE (t) == MEM)
+if (MEM_P (t))
 sched_analyze_2 (deps, XEXP (t, 0), insn);
 }
gcc/sdbout.c
@@ -88,7 +88,7 @@ static GTY(()) bool sdbout_initialized;
 /* 1 if PARM is passed to this function in memory. */

 #define PARM_PASSED_IN_MEMORY(PARM) \
-(GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
+(MEM_P (DECL_INCOMING_RTL (PARM)))

 /* A C expression for the integer offset value of an automatic variable
 (C_AUTO) having address X (an RTX). */
@@ -732,7 +732,7 @@ sdbout_symbol (tree decl, int local)
 a DECL_INITIAL value of 0. */
 if (! DECL_INITIAL (decl))
 return;
-if (GET_CODE (DECL_RTL (decl)) != MEM
+if (!MEM_P (DECL_RTL (decl))
 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != SYMBOL_REF)
 return;
 PUT_SDB_DEF (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
@@ -811,7 +811,7 @@ sdbout_symbol (tree decl, int local)
 /* Don't output anything if an auto variable
 gets RTL that is static.
 GAS version 2.2 can't handle such output. */
-else if (GET_CODE (value) == MEM && CONSTANT_P (XEXP (value, 0))
+else if (MEM_P (value) && CONSTANT_P (XEXP (value, 0))
 && ! TREE_STATIC (decl))
 return;

@@ -830,7 +830,7 @@ sdbout_symbol (tree decl, int local)

 /* Defer SDB information for top-level initialized variables! */
 if (! local
-&& GET_CODE (value) == MEM
+&& MEM_P (value)
 && DECL_INITIAL (decl))
 return;

@@ -845,7 +845,7 @@ sdbout_symbol (tree decl, int local)
 else
 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

-if (GET_CODE (value) == MEM
+if (MEM_P (value)
 && GET_CODE (XEXP (value, 0)) == SYMBOL_REF)
 {
 PUT_SDB_DEF (name);
@@ -866,8 +866,8 @@ sdbout_symbol (tree decl, int local)
 PUT_SDB_INT_VAL (DBX_REGISTER_NUMBER (regno));
 PUT_SDB_SCL (C_REG);
 }
-else if (GET_CODE (value) == MEM
-&& (GET_CODE (XEXP (value, 0)) == MEM
+else if (MEM_P (value)
+&& (MEM_P (XEXP (value, 0))
 || (REG_P (XEXP (value, 0))
 && REGNO (XEXP (value, 0)) != HARD_FRAME_POINTER_REGNUM
 && REGNO (XEXP (value, 0)) != STACK_POINTER_REGNUM)))
@@ -901,7 +901,7 @@ sdbout_symbol (tree decl, int local)
 type = make_node (POINTER_TYPE);
 TREE_TYPE (type) = TREE_TYPE (decl);
 }
-else if (GET_CODE (value) == MEM
+else if (MEM_P (value)
 && ((GET_CODE (XEXP (value, 0)) == PLUS
 && REG_P (XEXP (XEXP (value, 0), 0))
 && GET_CODE (XEXP (XEXP (value, 0), 1)) == CONST_INT)
@@ -943,7 +943,7 @@ sdbout_toplevel_data (tree decl)
 return;

 if (! (TREE_CODE (decl) == VAR_DECL
-&& GET_CODE (DECL_RTL (decl)) == MEM
+&& MEM_P (DECL_RTL (decl))
 && DECL_INITIAL (decl)))
 abort ();

@@ -1310,7 +1310,7 @@ sdbout_parms (tree parms)
 (GET_MODE_SIZE (TYPE_MODE (DECL_ARG_TYPE (parms)))
 - GET_MODE_SIZE (GET_MODE (DECL_RTL (parms))));

-if (GET_CODE (DECL_RTL (parms)) == MEM
+if (MEM_P (DECL_RTL (parms))
 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == PLUS
 && (GET_CODE (XEXP (XEXP (DECL_RTL (parms), 0), 1))
 == CONST_INT)
@@ -1352,7 +1352,7 @@ sdbout_parms (tree parms)
 PUT_SDB_TYPE (plain_type (TREE_TYPE (parms)));
 PUT_SDB_ENDEF;
 }
-else if (GET_CODE (DECL_RTL (parms)) == MEM
+else if (MEM_P (DECL_RTL (parms))
 && XEXP (DECL_RTL (parms), 0) != const0_rtx)
 {
 /* Parm was passed in registers but lives on the stack. */
@@ -1362,7 +1362,7 @@ sdbout_parms (tree parms)
 or (MEM (REG ...)) or (MEM (MEM ...)),
 in which case we use a value of zero. */
 if (REG_P (XEXP (DECL_RTL (parms), 0))
-|| GET_CODE (XEXP (DECL_RTL (parms), 0)) == MEM)
+|| MEM_P (XEXP (DECL_RTL (parms), 0)))
 current_sym_value = 0;
 else
 current_sym_value = INTVAL (XEXP (XEXP (DECL_RTL (parms), 0), 1));
@@ -1412,7 +1412,7 @@ sdbout_reg_parms (tree parms)
 PUT_SDB_ENDEF;
 }
 /* Report parms that live in memory but not where they were passed. */
-else if (GET_CODE (DECL_RTL (parms)) == MEM
+else if (MEM_P (DECL_RTL (parms))
 && GET_CODE (XEXP (DECL_RTL (parms), 0)) == PLUS
 && GET_CODE (XEXP (XEXP (DECL_RTL (parms), 0), 1)) == CONST_INT
 && PARM_PASSED_IN_MEMORY (parms)
@@ -1464,7 +1464,7 @@ sdbout_global_decl (tree decl)

 /* Output COFF information for non-global file-scope initialized
 variables. */
-if (DECL_INITIAL (decl) && GET_CODE (DECL_RTL (decl)) == MEM)
+if (DECL_INITIAL (decl) && MEM_P (DECL_RTL (decl)))
 sdbout_toplevel_data (decl);
 }
 }
gcc/simplify-rtx.c
@@ -3719,7 +3719,7 @@ simplify_subreg (enum machine_mode outermode, rtx op,
 SUBREG with it. Don't do this if the MEM has a mode-dependent address
 or if we would be widening it. */

-if (GET_CODE (op) == MEM
+if (MEM_P (op)
 && ! mode_dependent_address_p (XEXP (op, 0))
 /* Allow splitting of volatile memory references in case we don't
 have instruction to move the whole thing. */
gcc/stmt.c
@@ -1513,12 +1513,12 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
 || is_inout)
 {
 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
-if (GET_CODE (op) == MEM)
+if (MEM_P (op))
 op = validize_mem (op);

-if (! allows_reg && GET_CODE (op) != MEM)
+if (! allows_reg && !MEM_P (op))
 error ("output number %d not directly addressable", i);
-if ((! allows_mem && GET_CODE (op) == MEM)
+if ((! allows_mem && MEM_P (op))
 || GET_CODE (op) == CONCAT)
 {
 real_output_rtx[i] = protect_from_queue (op, 1);
@@ -1587,7 +1587,7 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
 /* Never pass a CONCAT to an ASM. */
 if (GET_CODE (op) == CONCAT)
 op = force_reg (GET_MODE (op), op);
-else if (GET_CODE (op) == MEM)
+else if (MEM_P (op))
 op = validize_mem (op);

 if (asm_operand_ok (op, constraint) <= 0)
@@ -1597,7 +1597,7 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
 else if (!allows_mem)
 warning ("asm operand %d probably doesn't match constraints",
 i + noutputs);
-else if (GET_CODE (op) == MEM)
+else if (MEM_P (op))
 {
 /* We won't recognize either volatile memory or memory
 with a queued address as available a memory_operand
@@ -2108,7 +2108,7 @@ expand_expr_stmt_value (tree exp, int want_value, int maybe_last)

 /* If all we do is reference a volatile value in memory,
 copy it to a register to be sure it is actually touched. */
-if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
+if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
 {
 if (TYPE_MODE (type) == VOIDmode)
 ;
@@ -3297,7 +3297,7 @@ expand_decl (tree decl)
 to the proper address. */
 if (DECL_RTL_SET_P (decl))
 {
-if (GET_CODE (DECL_RTL (decl)) != MEM
+if (!MEM_P (DECL_RTL (decl))
 || !REG_P (XEXP (DECL_RTL (decl), 0)))
 abort ();
 oldaddr = XEXP (DECL_RTL (decl), 0);
@@ -3621,7 +3621,7 @@ expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)

 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
 instead create a new MEM rtx with the proper mode. */
-if (GET_CODE (x) == MEM)
+if (MEM_P (x))
 {
 if (mode == GET_MODE (x))
 SET_DECL_RTL (decl_elt, x);
@@ -4530,7 +4530,7 @@ expand_end_case_type (tree orig_index, tree orig_type)
 do_pending_stack_adjust ();

 index = protect_from_queue (index, 0);
-if (GET_CODE (index) == MEM)
+if (MEM_P (index))
 index = copy_to_reg (index);
 if (GET_CODE (index) == CONST_INT
 || TREE_CODE (index_expr) == INTEGER_CST)
gcc/unroll.c
@@ -1599,7 +1599,7 @@ calculate_giv_inc (rtx pattern, rtx src_insn, unsigned int regno)

 /* Some ports store large constants in memory and add a REG_EQUAL
 note to the store insn. */
-else if (GET_CODE (increment) == MEM)
+else if (MEM_P (increment))
 {
 rtx note = find_reg_note (src_insn, REG_EQUAL, 0);
 if (note)
gcc/var-tracking.c
@@ -370,7 +370,7 @@ stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre,
 else
 *post -= INTVAL (XEXP (src, 1));
 }
-else if (GET_CODE (dest) == MEM)
+else if (MEM_P (dest))
 {
 /* (set (mem (pre_dec (reg sp))) (foo)) */
 src = XEXP (dest, 0);
@@ -474,7 +474,7 @@ bb_stack_adjust_offset (basic_block bb)
 offset += VTI (bb)->mos[i].u.adjust;
 else if (VTI (bb)->mos[i].type != MO_CALL)
 {
-if (GET_CODE (VTI (bb)->mos[i].u.loc) == MEM)
+if (MEM_P (VTI (bb)->mos[i].u.loc))
 {
 VTI (bb)->mos[i].u.loc
 = adjust_stack_reference (VTI (bb)->mos[i].u.loc, -offset);
@@ -1466,13 +1466,13 @@ track_expr_p (tree expr)
 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
 char **_dl_argv;
 */
-if (GET_CODE (decl_rtl) == MEM
+if (MEM_P (decl_rtl)
 && contains_symbol_ref (XEXP (decl_rtl, 0)))
 return 0;

 /* If RTX is a memory it should not be very large (because it would be
 an array or struct). */
-if (GET_CODE (decl_rtl) == MEM)
+if (MEM_P (decl_rtl))
 {
 /* Do not track structures and arrays. */
 if (GET_MODE (decl_rtl) == BLKmode)
@@ -1501,7 +1501,7 @@ count_uses (rtx *loc, void *insn)
 #endif
 VTI (bb)->n_mos++;
 }
-else if (GET_CODE (*loc) == MEM
+else if (MEM_P (*loc)
 && MEM_EXPR (*loc)
 && track_expr_p (MEM_EXPR (*loc)))
 {
@@ -1544,7 +1544,7 @@ add_uses (rtx *loc, void *insn)
 mo->u.loc = *loc;
 mo->insn = (rtx) insn;
 }
-else if (GET_CODE (*loc) == MEM
+else if (MEM_P (*loc)
 && MEM_EXPR (*loc)
 && track_expr_p (MEM_EXPR (*loc)))
 {
@@ -1585,7 +1585,7 @@ add_stores (rtx loc, rtx expr, void *insn)
 mo->u.loc = loc;
 mo->insn = (rtx) insn;
 }
-else if (GET_CODE (loc) == MEM
+else if (MEM_P (loc)
 && MEM_EXPR (loc)
 && track_expr_p (MEM_EXPR (loc)))
 {
@@ -1631,7 +1631,7 @@ compute_bb_dataflow (basic_block bb)

 if (REG_P (loc))
 var_reg_delete_and_set (out, loc);
-else if (GET_CODE (loc) == MEM)
+else if (MEM_P (loc))
 var_mem_delete_and_set (out, loc);
 }
 break;
@@ -1643,7 +1643,7 @@ compute_bb_dataflow (basic_block bb)

 if (REG_P (loc))
 var_reg_delete (out, loc);
-else if (GET_CODE (loc) == MEM)
+else if (MEM_P (loc))
 var_mem_delete (out, loc);
 }
 break;
@@ -2472,7 +2472,7 @@ vt_get_decl_and_offset (rtx rtl, tree *declp, HOST_WIDE_INT *offsetp)
 return true;
 }
 }
-else if (GET_CODE (rtl) == MEM)
+else if (MEM_P (rtl))
 {
 if (MEM_ATTRS (rtl))
 {
@@ -2529,7 +2529,7 @@ vt_add_function_parameters (void)
 #endif

 incoming = eliminate_regs (incoming, 0, NULL_RTX);
-if (!frame_pointer_needed && GET_CODE (incoming) == MEM)
+if (!frame_pointer_needed && MEM_P (incoming))
 incoming = adjust_stack_reference (incoming, -stack_adjust);
 out = &VTI (ENTRY_BLOCK_PTR)->out;

@@ -2543,7 +2543,7 @@ vt_add_function_parameters (void)
 parm, offset, incoming);
 set_variable_part (out, incoming, parm, offset);
 }
-else if (GET_CODE (incoming) == MEM)
+else if (MEM_P (incoming))
 {
 set_variable_part (out, incoming, parm, offset);
 }
gcc/varasm.c
@@ -896,7 +896,7 @@ make_decl_rtl (tree decl, const char *asmspec)
 void
 make_var_volatile (tree var)
 {
-if (GET_CODE (DECL_RTL (var)) != MEM)
+if (!MEM_P (DECL_RTL (var)))
 abort ();

 MEM_VOLATILE_P (DECL_RTL (var)) = 1;
@@ -1059,7 +1059,7 @@ notice_global_symbol (tree decl)
 || (DECL_COMMON (decl)
 && (DECL_INITIAL (decl) == 0
 || DECL_INITIAL (decl) == error_mark_node))))
-|| GET_CODE (DECL_RTL (decl)) != MEM)
+|| !MEM_P (DECL_RTL (decl)))
 return;

 /* We win when global object is found, but it is useful to know about weak
@@ -1652,7 +1652,7 @@ assemble_external (tree decl ATTRIBUTE_UNUSED)
 {
 rtx rtl = DECL_RTL (decl);

-if (GET_CODE (rtl) == MEM && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
+if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
 && !SYMBOL_REF_USED (XEXP (rtl, 0))
 && !incorporeal_function_p (decl))
 {
@@ -2039,7 +2039,7 @@ decode_addr_const (tree exp, struct addr_const *value)
 abort ();
 }

-if (GET_CODE (x) != MEM)
+if (!MEM_P (x))
 abort ();
 x = XEXP (x, 0);

@@ -4029,7 +4029,7 @@ mark_weak (tree decl)
 DECL_WEAK (decl) = 1;

 if (DECL_RTL_SET_P (decl)
-&& GET_CODE (DECL_RTL (decl)) == MEM
+&& MEM_P (DECL_RTL (decl))
 && XEXP (DECL_RTL (decl), 0)
 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF)
 SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1;
@@ -4893,7 +4893,7 @@ default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
 int flags;

 /* Careful not to prod global register variables. */
-if (GET_CODE (rtl) != MEM)
+if (!MEM_P (rtl))
 return;
 symbol = XEXP (rtl, 0);
 if (GET_CODE (symbol) != SYMBOL_REF)
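Editorial note (not part of the commit): every hunk above is the same mechanical substitution, replacing an open-coded rtx code check with the MEM_P predicate macro. A minimal sketch of the pattern, assuming MEM_P is defined in gcc/rtl.h roughly as it was in the GCC 3.5/4.0 sources of this period:

/* Assumed definition from gcc/rtl.h: true iff X is a MEM rtx.  */
#define MEM_P(X) (GET_CODE (X) == MEM)

/* Before the commit: open-coded code comparison.  */
if (GET_CODE (x) == MEM && MEM_VOLATILE_P (x))
  handle_volatile_mem (x);   /* hypothetical caller, for illustration only */

/* After the commit: the same test through the predicate macro.  */
if (MEM_P (x) && MEM_VOLATILE_P (x))
  handle_volatile_mem (x);

The generated code is identical in both forms; the macro only centralizes the test, so the conversion is behavior-preserving and later changes to how MEM rtxen are classified need to touch a single definition.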