calls.c (store_one_arg): Revert 1999-02-16 change.

* calls.c (store_one_arg): Revert 1999-02-16 change.  Revert
	2000-12-17 change.  Pass EXPAND_STACK_PARM to expand_expr.
	* expr.h (enum expand_modifier): Define EXPAND_STACK_PARM.
	(enum block_op_methods): Reorder for better store_expr optimization.
	* expr.c (store_expr): Test bit 1 of "want_value" for call param
	stores, test bit 0 for original want_value meaning.  Pass
	BLOCK_OP_CALL_PARM to emit_block_move when bit 1 set.  Adjust
	recursive calls, and calls to expand_param.
	(expand_expr): Handle EXPAND_STACK_PARM modifier.  When cse
	expected, set target to 0 rather than to subtarget.  Formatting.

From-SVN: r63337
Author: Alan Modra <amodra@bigpond.net.au>
Committed by: Alan Modra
Date: 2003-02-23 22:19:39 +00:00
Commit: 8403445aba (parent ef90638128)
4 changed files with 171 additions and 101 deletions

gcc/ChangeLog

@ -1,3 +1,16 @@
2003-02-24  Alan Modra  <amodra@bigpond.net.au>

	* calls.c (store_one_arg): Revert 1999-02-16 change.  Revert
	2000-12-17 change.  Pass EXPAND_STACK_PARM to expand_expr.
	* expr.h (enum expand_modifier): Define EXPAND_STACK_PARM.
	(enum block_op_methods): Reorder for better store_expr optimization.
	* expr.c (store_expr): Test bit 1 of "want_value" for call param
	stores, test bit 0 for original want_value meaning.  Pass
	BLOCK_OP_CALL_PARM to emit_block_move when bit 1 set.  Adjust
	recursive calls, and calls to expand_param.
	(expand_expr): Handle EXPAND_STACK_PARM modifier.  When cse
	expected, set target to 0 rather than to subtarget.  Formatting.

2003-02-23  Kazu Hirata  <kazu@cs.umass.edu>

	* gcse.c (cprop_jump): Use the REG_EQUAL note if available.
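
For illustration, a minimal sketch (not part of the patch) of the "want_value"
bit packing described above; the macro and function names here are hypothetical:

/* Hypothetical names, for illustration only -- not GCC code.  */
#define WANT_VALUE 1	/* bit 0: caller wants a copy of the stored value */
#define CALL_PARAM 2	/* bit 1: target is a call parameter on the stack */

static int
sketch_store (int want_value)
{
  /* store_expr tests bit 1 to choose BLOCK_OP_CALL_PARM and to pass
     EXPAND_STACK_PARM down to expand_expr; it tests bit 0, as before,
     to decide whether to hand a copy of the value back.  */
  int use_call_parm = (want_value & CALL_PARAM) != 0;
  int return_value = (want_value & WANT_VALUE) != 0;

  return (use_call_parm << 1) | return_value;
}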

gcc/calls.c

@ -4341,13 +4341,6 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
}
}
}
/* Now that we have saved any slots that will be overwritten by this
store, mark all slots this store will use. We must do this before
we actually expand the argument since the expansion itself may
trigger library calls which might need to use the same stack slot. */
if (argblock && ! variable_size && arg->stack)
for (i = lower_bound; i < upper_bound; i++)
stack_usage_map[i] = 1;
}
/* If this isn't going to be placed on both the stack and in registers,
@ -4400,7 +4393,7 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
(partial
|| TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
? NULL_RTX : arg->stack,
VOIDmode, 0);
VOIDmode, EXPAND_STACK_PARM);
/* If we are promoting object (or for any other reason) the mode
doesn't agree, convert the mode. */
@ -4543,37 +4536,6 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
}
}
/* Special handling is required if part of the parameter lies in the
register parameter area. The argument may be copied into the stack
slot using memcpy(), but the original contents of the register
parameter area will be restored after the memcpy() call.
To ensure that the part that lies in the register parameter area
is copied correctly, we emit a separate push for that part. This
push should be small enough to avoid a call to memcpy(). */
#ifndef STACK_PARMS_IN_REG_PARM_AREA
if (arg->reg && arg->pass_on_stack)
#else
if (1)
#endif
{
if (arg->offset.constant < reg_parm_stack_space && arg->offset.var)
error ("variable offset is passed partially in stack and in reg");
else if (arg->offset.constant < reg_parm_stack_space && arg->size.var)
error ("variable size is passed partially in stack and in reg");
else if (arg->offset.constant < reg_parm_stack_space
&& ((arg->offset.constant + arg->size.constant)
> reg_parm_stack_space))
{
rtx size_rtx1 = GEN_INT (reg_parm_stack_space - arg->offset.constant);
emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx1,
parm_align, partial, reg, excess, argblock,
ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
ARGS_SIZE_RTX (arg->alignment_pad));
}
}
emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
parm_align, partial, reg, excess, argblock,
ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
@ -4591,6 +4553,12 @@ store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
arg->value = arg->stack_slot;
}
/* Mark all slots this store used. */
if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
&& argblock && ! variable_size && arg->stack)
for (i = lower_bound; i < upper_bound; i++)
stack_usage_map[i] = 1;
/* Once we have pushed something, pops can't safely
be deferred during the rest of the arguments. */
NO_DEFER_POP;

gcc/expr.c

@ -4256,7 +4256,7 @@ expand_assignment (to, from, want_value, suggest_reg)
and storing the value into TARGET.
TARGET may contain a QUEUED rtx.
If WANT_VALUE is nonzero, return a copy of the value
If WANT_VALUE & 1 is nonzero, return a copy of the value
not in TARGET, so that we can be sure to use the proper
value in a containing expression even if TARGET has something
else stored in it. If possible, we copy the value through a pseudo
@ -4271,9 +4271,12 @@ expand_assignment (to, from, want_value, suggest_reg)
with no sequence point. Will other languages need this to
be more thorough?
If WANT_VALUE is 0, we return NULL, to make sure
If WANT_VALUE & 1 is 0, we return NULL, to make sure
to catch quickly any cases where the caller uses the value
and fails to set WANT_VALUE. */
and fails to set WANT_VALUE.
If WANT_VALUE & 2 is set, this is a store into a call param on the
stack, and block moves may need to be treated specially. */
rtx
store_expr (exp, target, want_value)
@ -4289,7 +4292,8 @@ store_expr (exp, target, want_value)
{
/* Perform first part of compound expression, then assign from second
part. */
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
emit_queue ();
return store_expr (TREE_OPERAND (exp, 1), target, want_value);
}
@ -4309,20 +4313,20 @@ store_expr (exp, target, want_value)
NO_DEFER_POP;
jumpifnot (TREE_OPERAND (exp, 0), lab1);
start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 1), target, 0);
store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
end_cleanup_deferral ();
emit_queue ();
emit_jump_insn (gen_jump (lab2));
emit_barrier ();
emit_label (lab1);
start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 2), target, 0);
store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
end_cleanup_deferral ();
emit_queue ();
emit_label (lab2);
OK_DEFER_POP;
return want_value ? target : NULL_RTX;
return want_value & 1 ? target : NULL_RTX;
}
else if (queued_subexp_p (target))
/* If target contains a postincrement, let's not risk
@ -4332,18 +4336,24 @@ store_expr (exp, target, want_value)
{
/* Expand EXP into a new pseudo. */
temp = gen_reg_rtx (GET_MODE (target));
temp = expand_expr (exp, temp, GET_MODE (target), 0);
temp = expand_expr (exp, temp, GET_MODE (target),
(want_value & 2
? EXPAND_STACK_PARM : EXPAND_NORMAL));
}
else
temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
(want_value & 2
? EXPAND_STACK_PARM : EXPAND_NORMAL));
/* If target is volatile, ANSI requires accessing the value
*from* the target, if it is accessed. So make that happen.
In no case return the target itself. */
if (! MEM_VOLATILE_P (target) && want_value)
if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
dont_return_target = 1;
}
else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
else if ((want_value & 1) != 0
&& GET_CODE (target) == MEM
&& ! MEM_VOLATILE_P (target)
&& GET_MODE (target) != BLKmode)
/* If target is in memory and caller wants value in a register instead,
arrange that. Pass TARGET as target for expand_expr so that,
@ -4352,7 +4362,8 @@ store_expr (exp, target, want_value)
Don't do this if TARGET is volatile because we are supposed
to write it and then read it. */
{
temp = expand_expr (exp, target, GET_MODE (target), 0);
temp = expand_expr (exp, target, GET_MODE (target),
want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
{
/* If TEMP is already in the desired TARGET, only copy it from
@ -4379,7 +4390,8 @@ store_expr (exp, target, want_value)
the extend. But don't do this if the type of EXP is a subtype
of something else since then the conversion might involve
more than just converting modes. */
if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
if ((want_value & 1) == 0
&& INTEGRAL_TYPE_P (TREE_TYPE (exp))
&& TREE_TYPE (TREE_TYPE (exp)) == 0)
{
if (TREE_UNSIGNED (TREE_TYPE (exp))
@ -4396,14 +4408,15 @@ store_expr (exp, target, want_value)
inner_target = SUBREG_REG (target);
}
temp = expand_expr (exp, inner_target, VOIDmode, 0);
temp = expand_expr (exp, inner_target, VOIDmode,
want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
/* If TEMP is a MEM and we want a result value, make the access
now so it gets done only once. Strictly speaking, this is
only necessary if the MEM is volatile, or if the address
overlaps TARGET. But not performing the load twice also
reduces the amount of rtl we generate and then have to CSE. */
if (GET_CODE (temp) == MEM && want_value)
if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
temp = copy_to_reg (temp);
/* If TEMP is a VOIDmode constant, use convert_modes to make
@ -4424,7 +4437,7 @@ store_expr (exp, target, want_value)
target. Otherwise, the caller might get confused by a result whose
mode is larger than expected. */
if (want_value && GET_MODE (temp) != GET_MODE (target))
if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
{
if (GET_MODE (temp) != VOIDmode)
{
@ -4439,11 +4452,12 @@ store_expr (exp, target, want_value)
temp, SUBREG_PROMOTED_UNSIGNED_P (target));
}
return want_value ? temp : NULL_RTX;
return want_value & 1 ? temp : NULL_RTX;
}
else
{
temp = expand_expr (exp, target, GET_MODE (target), 0);
temp = expand_expr (exp, target, GET_MODE (target),
want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
/* Return TARGET if it's a specified hardware register.
If TARGET is a volatile mem ref, either return TARGET
or return a reg copied *from* TARGET; ANSI requires this.
@ -4455,7 +4469,7 @@ store_expr (exp, target, want_value)
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
&& !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
&& ! rtx_equal_p (temp, target)
&& (CONSTANT_P (temp) || want_value))
&& (CONSTANT_P (temp) || (want_value & 1) != 0))
dont_return_target = 1;
}
@ -4526,7 +4540,9 @@ store_expr (exp, target, want_value)
if (GET_CODE (size) == CONST_INT
&& INTVAL (size) < TREE_STRING_LENGTH (exp))
emit_block_move (target, temp, size, BLOCK_OP_NORMAL);
emit_block_move (target, temp, size,
(want_value & 2
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
else
{
/* Compute the size of the data to copy from the string. */
@ -4534,13 +4550,17 @@ store_expr (exp, target, want_value)
= size_binop (MIN_EXPR,
make_tree (sizetype, size),
size_int (TREE_STRING_LENGTH (exp)));
rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
VOIDmode, 0);
rtx copy_size_rtx
= expand_expr (copy_size, NULL_RTX, VOIDmode,
(want_value & 2
? EXPAND_STACK_PARM : EXPAND_NORMAL));
rtx label = 0;
/* Copy that much. */
copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL);
emit_block_move (target, temp, copy_size_rtx,
(want_value & 2
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
/* Figure out how much is left in TARGET that we have to clear.
Do all calculations in ptr_mode. */
@ -4581,13 +4601,15 @@ store_expr (exp, target, want_value)
else if (GET_CODE (target) == PARALLEL)
emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
else if (GET_MODE (temp) == BLKmode)
emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
emit_block_move (target, temp, expr_size (exp),
(want_value & 2
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
else
emit_move_insn (target, temp);
}
/* If we don't want a value, return NULL_RTX. */
if (! want_value)
if ((want_value & 1) == 0)
return NULL_RTX;
/* If we are supposed to return TEMP, do so as long as it isn't a MEM.
@ -4596,7 +4618,8 @@ store_expr (exp, target, want_value)
return temp;
/* Return TARGET itself if it is a hard register. */
else if (want_value && GET_MODE (target) != BLKmode
else if ((want_value & 1) != 0
&& GET_MODE (target) != BLKmode
&& ! (GET_CODE (target) == REG
&& REGNO (target) < FIRST_PSEUDO_REGISTER))
return copy_to_reg (target);
@ -6410,7 +6433,14 @@ find_placeholder (exp, plist)
EXPAND_CONST_ADDRESS says that it is okay to return a MEM
with a constant address even if that address is not normally legitimate.
EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
a call parameter. Such targets require special care as we haven't yet
marked TARGET so that it's safe from being trashed by libcalls. We
don't want to use TARGET for anything but the final result;
Intermediate values must go elsewhere. Additionally, calls to
emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
rtx
expand_expr (exp, target, tmode, modifier)
@ -6555,7 +6585,7 @@ expand_expr (exp, target, tmode, modifier)
&& (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
&& ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
&& ! (code == CALL_EXPR && aggregate_value_p (exp)))
target = subtarget;
target = 0;
switch (code)
{
@ -6741,7 +6771,7 @@ expand_expr (exp, target, tmode, modifier)
return temp;
case CONST_DECL:
return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
case REAL_CST:
/* If optimized, generate immediate CONST_DOUBLE
@ -6857,7 +6887,8 @@ expand_expr (exp, target, tmode, modifier)
if (temp == const0_rtx)
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
else
store_expr (TREE_OPERAND (exp, 0), temp, 0);
store_expr (TREE_OPERAND (exp, 0), temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
TREE_USED (exp) = 1;
}
@ -7042,7 +7073,8 @@ expand_expr (exp, target, tmode, modifier)
/* Handle calls that pass values in multiple non-contiguous
locations. The Irix 6 ABI has examples of this. */
if (target == 0 || ! safe_from_p (target, exp, 1)
|| GET_CODE (target) == PARALLEL)
|| GET_CODE (target) == PARALLEL
|| modifier == EXPAND_STACK_PARM)
target
= assign_temp (build_qualified_type (type,
(TYPE_QUALS (type)
@ -7216,6 +7248,9 @@ expand_expr (exp, target, tmode, modifier)
&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
<= HOST_BITS_PER_WIDE_INT))))
{
if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
&& modifier == EXPAND_STACK_PARM)
target = 0;
op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
{
@ -7270,10 +7305,12 @@ expand_expr (exp, target, tmode, modifier)
(TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
!= INTEGER_CST)
&& modifier != EXPAND_STACK_PARM
? target : NULL_RTX),
VOIDmode,
(modifier == EXPAND_INITIALIZER
|| modifier == EXPAND_CONST_ADDRESS)
|| modifier == EXPAND_CONST_ADDRESS
|| modifier == EXPAND_STACK_PARM)
? modifier : EXPAND_NORMAL);
/* If this is a constant, put it into a register if it is a
@ -7290,7 +7327,8 @@ expand_expr (exp, target, tmode, modifier)
if (offset != 0)
{
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
EXPAND_SUM);
/* If this object is in a register, put it into memory.
This case can't occur in C, but can in Ada if we have
@ -7426,7 +7464,8 @@ expand_expr (exp, target, tmode, modifier)
emit_block_move (target, op0,
GEN_INT ((bitsize + BITS_PER_UNIT - 1)
/ BITS_PER_UNIT),
BLOCK_OP_NORMAL);
(modifier == EXPAND_STACK_PARM
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
return target;
}
@ -7436,8 +7475,10 @@ expand_expr (exp, target, tmode, modifier)
if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
op0 = extract_bit_field (op0, bitsize, bitpos,
unsignedp, target, ext_mode, ext_mode,
op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
(modifier == EXPAND_STACK_PARM
? NULL_RTX : target),
ext_mode, ext_mode,
int_size_in_bytes (TREE_TYPE (tem)));
/* If the result is a record type and BITSIZE is narrower than
@ -7681,8 +7722,8 @@ expand_expr (exp, target, tmode, modifier)
{
if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
== BUILT_IN_FRONTEND)
return (*lang_hooks.expand_expr)
(exp, original_target, tmode, modifier);
return (*lang_hooks.expand_expr) (exp, original_target,
tmode, modifier);
else
return expand_builtin (exp, target, subtarget, tmode, ignore);
}
@ -7718,7 +7759,8 @@ expand_expr (exp, target, tmode, modifier)
if (GET_CODE (target) == MEM)
/* Store data into beginning of memory target. */
store_expr (TREE_OPERAND (exp, 0),
adjust_address (target, TYPE_MODE (valtype), 0), 0);
adjust_address (target, TYPE_MODE (valtype), 0),
modifier == EXPAND_STACK_PARM ? 2 : 0);
else if (GET_CODE (target) == REG)
/* Store this field into a union of the proper type. */
@ -7843,7 +7885,8 @@ expand_expr (exp, target, tmode, modifier)
if (GET_MODE (op0) == BLKmode)
emit_block_move (new_with_op0_mode, op0,
GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
BLOCK_OP_NORMAL);
(modifier == EXPAND_STACK_PARM
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
else
emit_move_insn (new_with_op0_mode, op0);
@ -7895,6 +7938,8 @@ expand_expr (exp, target, tmode, modifier)
if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
|| (mode == ptr_mode && (unsignedp || ! flag_trapv)))
{
if (modifier == EXPAND_STACK_PARM)
target = 0;
if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
&& GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
&& TREE_CONSTANT (TREE_OPERAND (exp, 1)))
@ -8117,6 +8162,9 @@ expand_expr (exp, target, tmode, modifier)
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
subtarget = 0;
if (modifier == EXPAND_STACK_PARM)
target = 0;
/* Check for multiplying things that have been extended
from a narrower type. If this machine supports multiplying
in that narrower type with a result in the desired type,
@ -8200,6 +8248,8 @@ expand_expr (exp, target, tmode, modifier)
case EXACT_DIV_EXPR:
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
subtarget = 0;
if (modifier == EXPAND_STACK_PARM)
target = 0;
/* Possible optimization: compute the dividend with EXPAND_SUM
then if the divisor is constant can optimize the case
where some terms of the dividend have coeffs divisible by it. */
@ -8228,6 +8278,8 @@ expand_expr (exp, target, tmode, modifier)
case ROUND_MOD_EXPR:
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
subtarget = 0;
if (modifier == EXPAND_STACK_PARM)
target = 0;
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
@ -8239,14 +8291,14 @@ expand_expr (exp, target, tmode, modifier)
case FIX_TRUNC_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
if (target == 0)
if (target == 0 || modifier == EXPAND_STACK_PARM)
target = gen_reg_rtx (mode);
expand_fix (target, op0, unsignedp);
return target;
case FLOAT_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
if (target == 0)
if (target == 0 || modifier == EXPAND_STACK_PARM)
target = gen_reg_rtx (mode);
/* expand_float can't figure out what to do if FROM has VOIDmode.
So give it the correct mode. With -O, cse will optimize this. */
@ -8259,6 +8311,8 @@ expand_expr (exp, target, tmode, modifier)
case NEGATE_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
if (modifier == EXPAND_STACK_PARM)
target = 0;
temp = expand_unop (mode,
! unsignedp && flag_trapv
&& (GET_MODE_CLASS(mode) == MODE_INT)
@ -8269,6 +8323,8 @@ expand_expr (exp, target, tmode, modifier)
case ABS_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
if (modifier == EXPAND_STACK_PARM)
target = 0;
/* Handle complex values specially. */
if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
@ -8286,7 +8342,9 @@ expand_expr (exp, target, tmode, modifier)
case MAX_EXPR:
case MIN_EXPR:
target = original_target;
if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
if (target == 0
|| modifier == EXPAND_STACK_PARM
|| ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
|| (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
|| GET_MODE (target) != mode
|| (GET_CODE (target) == REG
@ -8343,6 +8401,8 @@ expand_expr (exp, target, tmode, modifier)
case BIT_NOT_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
if (modifier == EXPAND_STACK_PARM)
target = 0;
temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
if (temp == 0)
abort ();
@ -8350,6 +8410,8 @@ expand_expr (exp, target, tmode, modifier)
case FFS_EXPR:
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
if (modifier == EXPAND_STACK_PARM)
target = 0;
temp = expand_unop (mode, ffs_optab, op0, target, 1);
if (temp == 0)
abort ();
@ -8417,6 +8479,8 @@ expand_expr (exp, target, tmode, modifier)
case RROTATE_EXPR:
if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
subtarget = 0;
if (modifier == EXPAND_STACK_PARM)
target = 0;
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
unsignedp);
@ -8436,7 +8500,9 @@ expand_expr (exp, target, tmode, modifier)
case UNGT_EXPR:
case UNGE_EXPR:
case UNEQ_EXPR:
temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
temp = do_store_flag (exp,
modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
tmode != VOIDmode ? tmode : mode, 0);
if (temp != 0)
return temp;
@ -8485,7 +8551,9 @@ expand_expr (exp, target, tmode, modifier)
case TRUTH_ANDIF_EXPR:
case TRUTH_ORIF_EXPR:
if (! ignore
&& (target == 0 || ! safe_from_p (target, exp, 1)
&& (target == 0
|| modifier == EXPAND_STACK_PARM
|| ! safe_from_p (target, exp, 1)
/* Make sure we don't have a hard reg (such as function's return
value) live across basic blocks, if not optimizing. */
|| (!optimize && GET_CODE (target) == REG
@ -8505,6 +8573,8 @@ expand_expr (exp, target, tmode, modifier)
return ignore ? const0_rtx : target;
case TRUTH_NOT_EXPR:
if (modifier == EXPAND_STACK_PARM)
target = 0;
op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
/* The parser is careful to generate TRUTH_NOT_EXPR
only with operands that are always zero or one. */
@ -8519,7 +8589,7 @@ expand_expr (exp, target, tmode, modifier)
emit_queue ();
return expand_expr (TREE_OPERAND (exp, 1),
(ignore ? const0_rtx : target),
VOIDmode, 0);
VOIDmode, modifier);
case COND_EXPR:
/* If we would have a "singleton" (see below) were it not for a
@ -8572,6 +8642,8 @@ expand_expr (exp, target, tmode, modifier)
return const0_rtx;
}
if (modifier == EXPAND_STACK_PARM)
target = 0;
op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
if (GET_MODE (op0) == mode)
return op0;
@ -8612,6 +8684,8 @@ expand_expr (exp, target, tmode, modifier)
if (ignore)
temp = 0;
else if (modifier == EXPAND_STACK_PARM)
temp = assign_temp (type, 0, 0, 1);
else if (original_target
&& (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
|| (singleton && GET_CODE (original_target) == REG
@ -8699,7 +8773,8 @@ expand_expr (exp, target, tmode, modifier)
|| (GET_CODE (temp) == REG
&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
temp = gen_reg_rtx (mode);
store_expr (singleton, temp, 0);
store_expr (singleton, temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
}
else
expand_expr (singleton,
@ -8718,11 +8793,11 @@ expand_expr (exp, target, tmode, modifier)
store_expr (build (TREE_CODE (binary_op), type,
make_tree (type, temp),
TREE_OPERAND (binary_op, 1)),
temp, 0);
temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
else
store_expr (build1 (TREE_CODE (unary_op), type,
make_tree (type, temp)),
temp, 0);
temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
op1 = op0;
}
/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
@ -8741,11 +8816,13 @@ expand_expr (exp, target, tmode, modifier)
if (GET_CODE (temp) == REG
&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 1), temp, 0);
store_expr (TREE_OPERAND (exp, 1), temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
jumpif (TREE_OPERAND (exp, 0), op0);
start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 2), temp, 0);
store_expr (TREE_OPERAND (exp, 2), temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
op1 = op0;
}
else if (temp
@ -8760,11 +8837,13 @@ expand_expr (exp, target, tmode, modifier)
if (GET_CODE (temp) == REG
&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 2), temp, 0);
store_expr (TREE_OPERAND (exp, 2), temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
jumpifnot (TREE_OPERAND (exp, 0), op0);
start_cleanup_deferral ();
store_expr (TREE_OPERAND (exp, 1), temp, 0);
store_expr (TREE_OPERAND (exp, 1), temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
op1 = op0;
}
else
@ -8778,7 +8857,8 @@ expand_expr (exp, target, tmode, modifier)
example A ? throw : E */
if (temp != 0
&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
store_expr (TREE_OPERAND (exp, 1), temp, 0);
store_expr (TREE_OPERAND (exp, 1), temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
else
expand_expr (TREE_OPERAND (exp, 1),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
@ -8790,7 +8870,8 @@ expand_expr (exp, target, tmode, modifier)
start_cleanup_deferral ();
if (temp != 0
&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
store_expr (TREE_OPERAND (exp, 2), temp, 0);
store_expr (TREE_OPERAND (exp, 2), temp,
modifier == EXPAND_STACK_PARM ? 2 : 0);
else
expand_expr (TREE_OPERAND (exp, 2),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
@ -8895,7 +8976,7 @@ expand_expr (exp, target, tmode, modifier)
/* Mark it as expanded. */
TREE_OPERAND (exp, 1) = NULL_TREE;
store_expr (exp1, target, 0);
store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
@ -8980,6 +9061,8 @@ expand_expr (exp, target, tmode, modifier)
return expand_increment (exp, ! ignore, ignore);
case ADDR_EXPR:
if (modifier == EXPAND_STACK_PARM)
target = 0;
/* Are we taking the address of a nested function? */
if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
&& decl_function_context (TREE_OPERAND (exp, 0)) != 0
@ -9101,7 +9184,8 @@ expand_expr (exp, target, tmode, modifier)
abort ();
emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
BLOCK_OP_NORMAL);
(modifier == EXPAND_STACK_PARM
? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
op0 = new;
}
@ -9317,6 +9401,8 @@ expand_expr (exp, target, tmode, modifier)
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
binop2:
if (modifier == EXPAND_STACK_PARM)
target = 0;
temp = expand_binop (mode, this_optab, op0, op1, target,
unsignedp, OPTAB_LIB_WIDEN);
if (temp == 0)
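
For illustration, a hypothetical helper (not from the patch) showing the pattern
repeated through expand_expr above: when the new EXPAND_STACK_PARM modifier is
seen, the suggested target is dropped so intermediate results go to a scratch
register or temporary rather than into the not-yet-protected stack slot.

/* Hypothetical sketch; types and names simplified, not GCC code.  */
enum sketch_modifier { SK_NORMAL, SK_STACK_PARM };

static void *
sketch_choose_target (void *suggested_target, enum sketch_modifier modifier)
{
  /* Never compute intermediate values of a stack call parameter in the
     suggested target; force a fresh temporary instead.  */
  if (modifier == SK_STACK_PARM)
    return 0;
  return suggested_target;
}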

gcc/expr.h

@ -1,6 +1,6 @@
/* Definitions for code generation pass of GNU compiler.
Copyright (C) 1987, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000 Free Software Foundation, Inc.
1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
This file is part of GCC.
@ -44,13 +44,16 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#define QUEUED_NEXT(P) XEXP (P, 4)
/* This is the 4th arg to `expand_expr'.
EXPAND_STACK_PARM means we are possibly expanding a call param onto
the stack. Choosing a value of 2 isn't special; It just allows
some code optimization in store_expr.
EXPAND_SUM means it is ok to return a PLUS rtx or MULT rtx.
EXPAND_INITIALIZER is similar but also record any labels on forced_labels.
EXPAND_CONST_ADDRESS means it is ok to return a MEM whose address
is a constant that is not a legitimate address.
EXPAND_WRITE means we are only going to write to the resulting rtx. */
enum expand_modifier {EXPAND_NORMAL, EXPAND_SUM, EXPAND_CONST_ADDRESS,
EXPAND_INITIALIZER, EXPAND_WRITE};
enum expand_modifier {EXPAND_NORMAL = 0, EXPAND_STACK_PARM = 2, EXPAND_SUM,
EXPAND_CONST_ADDRESS, EXPAND_INITIALIZER, EXPAND_WRITE};
/* Prevent the compiler from deferring stack pops. See
inhibit_defer_pop for more information. */
@ -378,8 +381,8 @@ extern rtx convert_modes PARAMS ((enum machine_mode, enum machine_mode,
enum block_op_methods
{
BLOCK_OP_NORMAL,
BLOCK_OP_CALL_PARM,
BLOCK_OP_NO_LIBCALL
BLOCK_OP_NO_LIBCALL,
BLOCK_OP_CALL_PARM
};
extern rtx emit_block_move PARAMS ((rtx, rtx, rtx, enum block_op_methods));
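
One plausible reading (an assumption; the comment above only says choosing 2
"allows some code optimization in store_expr") is that with EXPAND_STACK_PARM
equal to 2 and BLOCK_OP_CALL_PARM now also equal to 2 while BLOCK_OP_NORMAL is
0, a test such as "want_value & 2 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL" can
fold to a plain mask.  The enumerator and function names below are hypothetical
stand-ins.

/* Hypothetical sketch, not GCC code.  */
enum sketch_block_op { SK_NORMAL = 0, SK_NO_LIBCALL = 1, SK_CALL_PARM = 2 };

static enum sketch_block_op
sketch_pick_block_op (int want_value)
{
  /* Both arms of the conditional equal (want_value & 2), so the branch
     can disappear entirely.  */
  return (enum sketch_block_op) (want_value & 2);
}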