expmed.c (extract_bit_field): No longer pass in alignment.

* expmed.c (extract_bit_field): No longer pass in alignment.
	(extract_fixed_bit_field, extract_split_bit_field): Likewise.
	(store_bit_field, store_fixed_bit_field, store_split_bit_field):
	Likewise.
	* expr.c (store_constructor, store_constructor_field): Likewise.
	(store_field, emit_group_load, emit_group_store): Likewise.
	* expr.h (emit_group_load, emit_group_store): Delete ALIGN parm.
	(store_bit_field, extract_bit_field): Likewise.
	* calls.c, expr.c, function.c: Change calls to above functions.
	* ifcvt.c, stmt.c: Likewise.

From-SVN: r46926
This commit is contained in:
Richard Kenner 2001-11-11 11:02:26 +00:00 committed by Richard Kenner
parent d746694a45
commit 04050c690d
8 changed files with 179 additions and 278 deletions

View File

@ -1,5 +1,16 @@
Sun Nov 11 05:56:01 2001 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
* expmed.c (extract_bit_field): No longer pass in alignment.
(extract_fixed_bit_field, extract_split_bit_field): Likewise.
(store_bit_field, store_fixed_bit_field, store_split_bit_field):
Likewise.
* expr.c (store_constructor, store_constructor_field): Likewise.
(store_field, emit_group_load, emit_group_store): Likewise.
* expr.h (emit_group_load, emit_group_store): Delete ALIGN parm.
(store_bit_field, extract_bit_field): Likewise.
* calls.c, expr.c, function.c: Change calls to above functions.
* ifcvt.c, stmt.c: Likewise.
* alias.c (nonoverlapping_memrefs_p): Not overlapping if one base is
constant and one is on frame.
If know memref offset, adjust size from decl.

View File

@ -1037,7 +1037,6 @@ store_unaligned_arguments_into_pseudos (args, num_actuals)
rtx reg = gen_reg_rtx (word_mode);
rtx word = operand_subword_force (args[i].value, j, BLKmode);
int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
args[i].aligned_regs[j] = reg;
@ -1057,9 +1056,9 @@ store_unaligned_arguments_into_pseudos (args, num_actuals)
bytes -= bitsize / BITS_PER_UNIT;
store_bit_field (reg, bitsize, big_endian_correction, word_mode,
extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
word_mode, word_mode, bitalign,
word_mode, word_mode,
BITS_PER_WORD),
bitalign, BITS_PER_WORD);
BITS_PER_WORD);
}
}
}
@ -1736,8 +1735,7 @@ load_register_parameters (args, num_actuals, call_fusage, flags)
if (GET_CODE (reg) == PARALLEL)
emit_group_load (reg, args[i].value,
int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
TYPE_ALIGN (TREE_TYPE (args[i].tree_value)));
int_size_in_bytes (TREE_TYPE (args[i].tree_value)));
/* If simple case, just do move. If normal partial, store_one_arg
has already loaded the register for us. In all other cases,
@ -3225,8 +3223,7 @@ expand_call (exp, target, ignore)
if (! rtx_equal_p (target, valreg))
emit_group_store (target, valreg,
int_size_in_bytes (TREE_TYPE (exp)),
TYPE_ALIGN (TREE_TYPE (exp)));
int_size_in_bytes (TREE_TYPE (exp)));
/* We can not support sibling calls for this case. */
sibcall_failure = 1;
@ -4004,9 +4001,7 @@ emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
/* Handle calls that pass values in multiple non-contiguous
locations. The PA64 has examples of this for library calls. */
if (reg != 0 && GET_CODE (reg) == PARALLEL)
emit_group_load (reg, val,
GET_MODE_SIZE (GET_MODE (val)),
GET_MODE_ALIGNMENT (GET_MODE (val)));
emit_group_load (reg, val, GET_MODE_SIZE (GET_MODE (val)));
else if (reg != 0 && partial == 0)
emit_move_insn (reg, val);

View File

@ -36,23 +36,20 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
static void store_fixed_bit_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT, rtx,
unsigned int));
unsigned HOST_WIDE_INT, rtx));
static void store_split_bit_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT, rtx,
unsigned int));
unsigned HOST_WIDE_INT, rtx));
static rtx extract_fixed_bit_field PARAMS ((enum machine_mode, rtx,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
rtx, int, unsigned int));
rtx, int));
static rtx mask_rtx PARAMS ((enum machine_mode, int,
int, int));
static rtx lshift_value PARAMS ((enum machine_mode, rtx,
int, int));
static rtx extract_split_bit_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT, int,
unsigned int));
unsigned HOST_WIDE_INT, int));
static void do_cmp_and_jump PARAMS ((rtx, rtx, enum rtx_code,
enum machine_mode, rtx));
@ -289,13 +286,12 @@ mode_for_extraction (pattern, opno)
else, we use the mode of operand 3. */
rtx
store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, total_size)
rtx str_rtx;
unsigned HOST_WIDE_INT bitsize;
unsigned HOST_WIDE_INT bitnum;
enum machine_mode fieldmode;
rtx value;
unsigned int align;
HOST_WIDE_INT total_size;
{
unsigned int unit
@ -306,11 +302,6 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
enum machine_mode op_mode = mode_for_extraction (EP_insv, 3);
/* It is wrong to have align==0, since every object is aligned at
least at a bit boundary. This usually means a bug elsewhere. */
if (align == 0)
abort ();
/* Discount the part of the structure before the desired byte.
We need to know how many bytes are safe to reference after it. */
if (total_size >= 0)
@ -347,9 +338,9 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
&& (GET_CODE (op0) != MEM
? (GET_MODE_SIZE (fieldmode) >= UNITS_PER_WORD
|| GET_MODE_SIZE (GET_MODE (op0)) == GET_MODE_SIZE (fieldmode))
: (! SLOW_UNALIGNED_ACCESS (fieldmode, align)
: (! SLOW_UNALIGNED_ACCESS (fieldmode, MEM_ALIGN (op0))
|| (offset * BITS_PER_UNIT % bitsize == 0
&& align % GET_MODE_BITSIZE (fieldmode) == 0))))
&& MEM_ALIGN (op0) % GET_MODE_BITSIZE (fieldmode) == 0))))
{
if (GET_MODE (op0) != fieldmode)
{
@ -472,10 +463,10 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
if I is 1, use the next to lowest word; and so on. */
unsigned int wordnum = (backwards ? nwords - i - 1 : i);
unsigned int bit_offset = (backwards
? MAX ((int) bitsize - ((int) i + 1)
* BITS_PER_WORD,
0)
: (int) i * BITS_PER_WORD);
? MAX ((int) bitsize - ((int) i + 1)
* BITS_PER_WORD,
0)
: (int) i * BITS_PER_WORD);
store_bit_field (op0, MIN (BITS_PER_WORD,
bitsize - i * BITS_PER_WORD),
@ -484,7 +475,7 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
(GET_MODE (value) == VOIDmode
? fieldmode
: GET_MODE (value))),
align, total_size);
total_size);
}
return value;
}
@ -519,9 +510,7 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
offset = 0;
}
else
{
op0 = protect_from_queue (op0, 1);
}
op0 = protect_from_queue (op0, 1);
/* If VALUE is a floating-point mode, access it as an integer of the
corresponding size. This can occur on a machine with 64 bit registers
@ -574,19 +563,19 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
if (GET_MODE (op0) == BLKmode
|| GET_MODE_SIZE (GET_MODE (op0)) > GET_MODE_SIZE (maxmode))
bestmode
= get_best_mode (bitsize, bitnum, align, maxmode,
= get_best_mode (bitsize, bitnum, MEM_ALIGN (op0), maxmode,
MEM_VOLATILE_P (op0));
else
bestmode = GET_MODE (op0);
if (bestmode == VOIDmode
|| (SLOW_UNALIGNED_ACCESS (bestmode, align)
&& GET_MODE_BITSIZE (bestmode) > align))
|| (SLOW_UNALIGNED_ACCESS (bestmode, MEM_ALIGN (op0))
&& GET_MODE_BITSIZE (bestmode) > MEM_ALIGN (op0)))
goto insv_loses;
/* Adjust address to point to the containing unit of that mode. */
/* Adjust address to point to the containing unit of that mode.
Compute offset as multiple of this unit, counting in bytes. */
unit = GET_MODE_BITSIZE (bestmode);
/* Compute offset as multiple of this unit, counting in bytes. */
offset = (bitnum / unit) * GET_MODE_SIZE (bestmode);
bitpos = bitnum % unit;
op0 = adjust_address (op0, bestmode, offset);
@ -595,7 +584,7 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
the unit. */
tempreg = copy_to_reg (op0);
store_bit_field (tempreg, bitsize, bitpos, fieldmode, value,
align, total_size);
total_size);
emit_move_insn (op0, tempreg);
return value;
}
@ -638,7 +627,8 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
if we must narrow it, be sure we do it correctly. */
if (GET_MODE_SIZE (GET_MODE (value)) < GET_MODE_SIZE (maxmode))
value1 = simplify_gen_subreg (maxmode, value1, GET_MODE (value1), 0);
value1 = simplify_gen_subreg (maxmode, value1,
GET_MODE (value1), 0);
else
value1 = gen_lowpart (maxmode, value1);
}
@ -664,13 +654,13 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
else
{
delete_insns_since (last);
store_fixed_bit_field (op0, offset, bitsize, bitpos, value, align);
store_fixed_bit_field (op0, offset, bitsize, bitpos, value);
}
}
else
insv_loses:
/* Insv is not available; store using shifts and boolean ops. */
store_fixed_bit_field (op0, offset, bitsize, bitpos, value, align);
store_fixed_bit_field (op0, offset, bitsize, bitpos, value);
return value;
}
@ -682,16 +672,14 @@ store_bit_field (str_rtx, bitsize, bitnum, fieldmode, value, align, total_size)
(If OP0 is a register, it may be a full word or a narrower mode,
but BITPOS still counts within a full word,
which is significant on big-endian machines.)
STRUCT_ALIGN is the alignment the structure is known to have.
Note that protect_from_queue has already been done on OP0 and VALUE. */
static void
store_fixed_bit_field (op0, offset, bitsize, bitpos, value, struct_align)
store_fixed_bit_field (op0, offset, bitsize, bitpos, value)
rtx op0;
unsigned HOST_WIDE_INT offset, bitsize, bitpos;
rtx value;
unsigned int struct_align;
{
enum machine_mode mode;
unsigned int total_bits = BITS_PER_WORD;
@ -699,9 +687,6 @@ store_fixed_bit_field (op0, offset, bitsize, bitpos, value, struct_align)
int all_zero = 0;
int all_one = 0;
if (! SLOW_UNALIGNED_ACCESS (word_mode, struct_align))
struct_align = BIGGEST_ALIGNMENT;
/* There is a case not handled here:
a structure with a known alignment of just a halfword
and a field split across two aligned halfwords within the structure.
@ -716,8 +701,7 @@ store_fixed_bit_field (op0, offset, bitsize, bitpos, value, struct_align)
/* Special treatment for a bit field split across two registers. */
if (bitsize + bitpos > BITS_PER_WORD)
{
store_split_bit_field (op0, bitsize, bitpos,
value, BITS_PER_WORD);
store_split_bit_field (op0, bitsize, bitpos, value);
return;
}
}
@ -733,16 +717,14 @@ store_fixed_bit_field (op0, offset, bitsize, bitpos, value, struct_align)
|| GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (word_mode))
mode = word_mode;
mode = get_best_mode (bitsize, bitpos + offset * BITS_PER_UNIT,
struct_align, mode,
GET_CODE (op0) == MEM && MEM_VOLATILE_P (op0));
MEM_ALIGN (op0), mode, MEM_VOLATILE_P (op0));
if (mode == VOIDmode)
{
/* The only way this should occur is if the field spans word
boundaries. */
store_split_bit_field (op0,
bitsize, bitpos + offset * BITS_PER_UNIT,
value, struct_align);
store_split_bit_field (op0, bitsize, bitpos + offset * BITS_PER_UNIT,
value);
return;
}
@ -856,17 +838,14 @@ store_fixed_bit_field (op0, offset, bitsize, bitpos, value, struct_align)
BITSIZE is the field width; BITPOS the position of its first bit
(within the word).
VALUE is the value to store.
ALIGN is the known alignment of OP0.
This is also the size of the memory objects to be used.
This does not yet handle fields wider than BITS_PER_WORD. */
static void
store_split_bit_field (op0, bitsize, bitpos, value, align)
store_split_bit_field (op0, bitsize, bitpos, value)
rtx op0;
unsigned HOST_WIDE_INT bitsize, bitpos;
rtx value;
unsigned int align;
{
unsigned int unit;
unsigned int bitsdone = 0;
@ -876,7 +855,7 @@ store_split_bit_field (op0, bitsize, bitpos, value, align)
if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
unit = BITS_PER_WORD;
else
unit = MIN (align, BITS_PER_WORD);
unit = MIN (MEM_ALIGN (op0), BITS_PER_WORD);
/* If VALUE is a constant other than a CONST_INT, get it into a register in
WORD_MODE. If we can do this using gen_lowpart_common, do so. Note
@ -932,18 +911,10 @@ store_split_bit_field (op0, bitsize, bitpos, value, align)
else
/* The args are chosen so that the last part includes the
lsb. Give extract_bit_field the value it needs (with
endianness compensation) to fetch the piece we want.
??? We have no idea what the alignment of VALUE is, so
we have to use a guess. */
part
= extract_fixed_bit_field
(word_mode, value, 0, thissize,
total_bits - bitsize + bitsdone, NULL_RTX, 1,
GET_MODE (value) == VOIDmode
? UNITS_PER_WORD
: (GET_MODE (value) == BLKmode
? 1 : GET_MODE_ALIGNMENT (GET_MODE (value))));
endianness compensation) to fetch the piece we want. */
part = extract_fixed_bit_field (word_mode, value, 0, thissize,
total_bits - bitsize + bitsdone,
NULL_RTX, 1);
}
else
{
@ -953,13 +924,8 @@ store_split_bit_field (op0, bitsize, bitpos, value, align)
>> bitsdone)
& (((HOST_WIDE_INT) 1 << thissize) - 1));
else
part
= extract_fixed_bit_field
(word_mode, value, 0, thissize, bitsdone, NULL_RTX, 1,
GET_MODE (value) == VOIDmode
? UNITS_PER_WORD
: (GET_MODE (value) == BLKmode
? 1 : GET_MODE_ALIGNMENT (GET_MODE (value))));
part = extract_fixed_bit_field (word_mode, value, 0, thissize,
bitsdone, NULL_RTX, 1);
}
/* If OP0 is a register, then handle OFFSET here.
@ -985,8 +951,8 @@ store_split_bit_field (op0, bitsize, bitpos, value, align)
/* OFFSET is in UNITs, and UNIT is in bits.
store_fixed_bit_field wants offset in bytes. */
store_fixed_bit_field (word, offset * unit / BITS_PER_UNIT,
thissize, thispos, part, align);
store_fixed_bit_field (word, offset * unit / BITS_PER_UNIT, thissize,
thispos, part);
bitsdone += thissize;
}
}
@ -1003,7 +969,6 @@ store_split_bit_field (op0, bitsize, bitpos, value, align)
TMODE is the mode the caller would like the value to have;
but the value may be returned with type MODE instead.
ALIGN is the alignment that STR_RTX is known to have.
TOTAL_SIZE is the size in bytes of the containing structure,
or -1 if varying.
@ -1014,14 +979,13 @@ store_split_bit_field (op0, bitsize, bitpos, value, align)
rtx
extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
target, mode, tmode, align, total_size)
target, mode, tmode, total_size)
rtx str_rtx;
unsigned HOST_WIDE_INT bitsize;
unsigned HOST_WIDE_INT bitnum;
int unsignedp;
rtx target;
enum machine_mode mode, tmode;
unsigned int align;
HOST_WIDE_INT total_size;
{
unsigned int unit
@ -1111,9 +1075,9 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
&& TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
GET_MODE_BITSIZE (GET_MODE (op0))))
|| (GET_CODE (op0) == MEM
&& (! SLOW_UNALIGNED_ACCESS (mode, align)
&& (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (op0))
|| (offset * BITS_PER_UNIT % bitsize == 0
&& align % bitsize == 0))))
&& MEM_ALIGN (op0) % bitsize == 0))))
&& ((bitsize >= BITS_PER_WORD && bitsize == GET_MODE_BITSIZE (mode)
&& bitpos % BITS_PER_WORD == 0)
|| (mode_for_size (bitsize, GET_MODE_CLASS (tmode), 0) != BLKmode
@ -1192,7 +1156,7 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
= extract_bit_field (op0, MIN (BITS_PER_WORD,
bitsize - i * BITS_PER_WORD),
bitnum + bit_offset, 1, target_part, mode,
word_mode, align, total_size);
word_mode, total_size);
if (target_part == 0)
abort ();
@ -1211,11 +1175,11 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
total_words = GET_MODE_SIZE (GET_MODE (target)) / UNITS_PER_WORD;
for (i = nwords; i < total_words; i++)
{
int wordnum = WORDS_BIG_ENDIAN ? total_words - i - 1 : i;
rtx target_part = operand_subword (target, wordnum, 1, VOIDmode);
emit_move_insn (target_part, const0_rtx);
}
emit_move_insn
(operand_subword (target,
WORDS_BIG_ENDIAN ? total_words - i - 1 : i,
1, VOIDmode),
const0_rtx);
}
return target;
}
@ -1259,9 +1223,7 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
offset = 0;
}
else
{
op0 = protect_from_queue (str_rtx, 1);
}
op0 = protect_from_queue (str_rtx, 1);
/* Now OFFSET is nonzero only for memory operands. */
@ -1303,14 +1265,15 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
if (GET_MODE (xop0) == BLKmode
|| (GET_MODE_SIZE (GET_MODE (op0))
> GET_MODE_SIZE (maxmode)))
bestmode = get_best_mode (bitsize, bitnum, align, maxmode,
bestmode = get_best_mode (bitsize, bitnum,
MEM_ALIGN (xop0), maxmode,
MEM_VOLATILE_P (xop0));
else
bestmode = GET_MODE (xop0);
if (bestmode == VOIDmode
|| (SLOW_UNALIGNED_ACCESS (bestmode, align)
&& GET_MODE_BITSIZE (bestmode) > align))
|| (SLOW_UNALIGNED_ACCESS (bestmode, MEM_ALIGN (xop0))
&& GET_MODE_BITSIZE (bestmode) > MEM_ALIGN (xop0)))
goto extzv_loses;
/* Compute offset as multiple of this unit,
@ -1390,13 +1353,13 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
{
delete_insns_since (last);
target = extract_fixed_bit_field (int_mode, op0, offset, bitsize,
bitpos, target, 1, align);
bitpos, target, 1);
}
}
else
extzv_loses:
target = extract_fixed_bit_field (int_mode, op0, offset, bitsize,
bitpos, target, 1, align);
extzv_loses:
target = extract_fixed_bit_field (int_mode, op0, offset, bitsize,
bitpos, target, 1);
}
else
{
@ -1432,14 +1395,15 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
if (GET_MODE (xop0) == BLKmode
|| (GET_MODE_SIZE (GET_MODE (op0))
> GET_MODE_SIZE (maxmode)))
bestmode = get_best_mode (bitsize, bitnum, align, maxmode,
bestmode = get_best_mode (bitsize, bitnum,
MEM_ALIGN (xop0), maxmode,
MEM_VOLATILE_P (xop0));
else
bestmode = GET_MODE (xop0);
if (bestmode == VOIDmode
|| (SLOW_UNALIGNED_ACCESS (bestmode, align)
&& GET_MODE_BITSIZE (bestmode) > align))
|| (SLOW_UNALIGNED_ACCESS (bestmode, MEM_ALIGN (xop0))
&& GET_MODE_BITSIZE (bestmode) > MEM_ALIGN (xop0)))
goto extv_loses;
/* Compute offset as multiple of this unit,
@ -1518,13 +1482,13 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
{
delete_insns_since (last);
target = extract_fixed_bit_field (int_mode, op0, offset, bitsize,
bitpos, target, 0, align);
bitpos, target, 0);
}
}
else
extv_loses:
target = extract_fixed_bit_field (int_mode, op0, offset, bitsize,
bitpos, target, 0, align);
extv_loses:
target = extract_fixed_bit_field (int_mode, op0, offset, bitsize,
bitpos, target, 0);
}
if (target == spec_target)
return target;
@ -1564,18 +1528,15 @@ extract_bit_field (str_rtx, bitsize, bitnum, unsignedp,
UNSIGNEDP is nonzero for an unsigned bit field (don't sign-extend value).
If TARGET is nonzero, attempts to store the value there
and return TARGET, but this is not guaranteed.
If TARGET is not used, create a pseudo-reg of mode TMODE for the value.
ALIGN is the alignment that STR_RTX is known to have. */
If TARGET is not used, create a pseudo-reg of mode TMODE for the value. */
static rtx
extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos,
target, unsignedp, align)
target, unsignedp)
enum machine_mode tmode;
rtx op0, target;
unsigned HOST_WIDE_INT offset, bitsize, bitpos;
int unsignedp;
unsigned int align;
{
unsigned int total_bits = BITS_PER_WORD;
enum machine_mode mode;
@ -1584,8 +1545,7 @@ extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos,
{
/* Special treatment for a bit field split across two registers. */
if (bitsize + bitpos > BITS_PER_WORD)
return extract_split_bit_field (op0, bitsize, bitpos,
unsignedp, align);
return extract_split_bit_field (op0, bitsize, bitpos, unsignedp);
}
else
{
@ -1593,16 +1553,15 @@ extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos,
includes the entire field. If such a mode would be larger than
a word, we won't be doing the extraction the normal way. */
mode = get_best_mode (bitsize, bitpos + offset * BITS_PER_UNIT, align,
word_mode,
GET_CODE (op0) == MEM && MEM_VOLATILE_P (op0));
mode = get_best_mode (bitsize, bitpos + offset * BITS_PER_UNIT,
MEM_ALIGN (op0), word_mode, MEM_VOLATILE_P (op0));
if (mode == VOIDmode)
/* The only way this should occur is if the field spans word
boundaries. */
return extract_split_bit_field (op0, bitsize,
bitpos + offset * BITS_PER_UNIT,
unsignedp, align);
unsignedp);
total_bits = GET_MODE_BITSIZE (mode);
@ -1628,12 +1587,9 @@ extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos,
mode = GET_MODE (op0);
if (BYTES_BIG_ENDIAN)
{
/* BITPOS is the distance between our msb and that of OP0.
Convert it to the distance from the lsb. */
bitpos = total_bits - bitsize - bitpos;
}
/* BITPOS is the distance between our msb and that of OP0.
Convert it to the distance from the lsb. */
bitpos = total_bits - bitsize - bitpos;
/* Now BITPOS is always the distance between the field's lsb and that of OP0.
We have reduced the big-endian case to the little-endian case. */
@ -1694,7 +1650,8 @@ extract_fixed_bit_field (tmode, op0, offset, bitsize, bitpos,
if (GET_MODE_BITSIZE (mode) != (bitsize + bitpos))
{
tree amount = build_int_2 (GET_MODE_BITSIZE (mode) - (bitsize + bitpos), 0);
tree amount
= build_int_2 (GET_MODE_BITSIZE (mode) - (bitsize + bitpos), 0);
/* Maybe propagate the target for the shift. */
/* But not if we will return the result--could confuse integrate.c. */
rtx subtarget = (target != 0 && GET_CODE (target) == REG
@ -1784,17 +1741,13 @@ lshift_value (mode, value, bitpos, bitsize)
OP0 is the REG, SUBREG or MEM rtx for the first of the two words.
BITSIZE is the field width; BITPOS, position of its first bit, in the word.
UNSIGNEDP is 1 if should zero-extend the contents; else sign-extend.
ALIGN is the known alignment of OP0. This is also the size of the
memory objects to be used. */
UNSIGNEDP is 1 if should zero-extend the contents; else sign-extend. */
static rtx
extract_split_bit_field (op0, bitsize, bitpos, unsignedp, align)
extract_split_bit_field (op0, bitsize, bitpos, unsignedp)
rtx op0;
unsigned HOST_WIDE_INT bitsize, bitpos;
int unsignedp;
unsigned int align;
{
unsigned int unit;
unsigned int bitsdone = 0;
@ -1806,7 +1759,7 @@ extract_split_bit_field (op0, bitsize, bitpos, unsignedp, align)
if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
unit = BITS_PER_WORD;
else
unit = MIN (align, BITS_PER_WORD);
unit = MIN (MEM_ALIGN (op0), BITS_PER_WORD);
while (bitsdone < bitsize)
{
@ -1851,7 +1804,7 @@ extract_split_bit_field (op0, bitsize, bitpos, unsignedp, align)
extract_fixed_bit_field wants offset in bytes. */
part = extract_fixed_bit_field (word_mode, word,
offset * unit / BITS_PER_UNIT,
thissize, thispos, 0, 1, align);
thissize, thispos, 0, 1);
bitsdone += thissize;
/* Shift this part into place for the result. */

View File

@ -153,14 +153,12 @@ static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
HOST_WIDE_INT, enum machine_mode,
tree, tree, unsigned int, int,
int));
static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
HOST_WIDE_INT));
tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
HOST_WIDE_INT, enum machine_mode,
tree, enum machine_mode, int,
unsigned int, HOST_WIDE_INT, int));
HOST_WIDE_INT, int));
static enum memory_use_mode
get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx PARAMS ((tree));
@ -1946,8 +1944,7 @@ move_block_from_reg (regno, x, nregs, size)
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
registers represented by a PARALLEL. SSIZE represents the total size of
block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
SRC in bits. */
block SRC in bytes, or -1 if not known. */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
the balance will be in what would be the low-order memory addresses, i.e.
left justified for big endian, right justified for little endian. This
@ -1956,9 +1953,8 @@ move_block_from_reg (regno, x, nregs, size)
would be needed. */
void
emit_group_load (dst, orig_src, ssize, align)
emit_group_load (dst, orig_src, ssize)
rtx dst, orig_src;
unsigned int align;
int ssize;
{
rtx *tmps, src;
@ -2006,12 +2002,13 @@ emit_group_load (dst, orig_src, ssize, align)
src = gen_reg_rtx (mode);
else
src = gen_reg_rtx (GET_MODE (orig_src));
emit_move_insn (src, orig_src);
}
/* Optimize the access just a bit. */
if (GET_CODE (src) == MEM
&& align >= GET_MODE_ALIGNMENT (mode)
&& MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
&& bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
&& bytelen == GET_MODE_SIZE (mode))
{
@ -2028,11 +2025,10 @@ emit_group_load (dst, orig_src, ssize, align)
tmps[i] = XEXP (src, 1);
else if (bytepos == 0)
{
rtx mem;
mem = assign_stack_temp (GET_MODE (src),
GET_MODE_SIZE (GET_MODE (src)), 0);
rtx mem = assign_stack_temp (GET_MODE (src),
GET_MODE_SIZE (GET_MODE (src)), 0);
emit_move_insn (mem, src);
tmps[i] = change_address (mem, mode, XEXP (mem, 0));
tmps[i] = adjust_address (mem, mode, 0);
}
else
abort ();
@ -2043,7 +2039,7 @@ emit_group_load (dst, orig_src, ssize, align)
else
tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
bytepos * BITS_PER_UNIT, 1, NULL_RTX,
mode, mode, align, ssize);
mode, mode, ssize);
if (BYTES_BIG_ENDIAN && shift)
expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
@ -2059,13 +2055,12 @@ emit_group_load (dst, orig_src, ssize, align)
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
registers represented by a PARALLEL. SSIZE represents the total size of
block DST, or -1 if not known. ALIGN is the known alignment of DST. */
block DST, or -1 if not known. */
void
emit_group_store (orig_dst, src, ssize, align)
emit_group_store (orig_dst, src, ssize)
rtx orig_dst, src;
int ssize;
unsigned int align;
{
rtx *tmps, dst;
int start, i;
@ -2109,8 +2104,8 @@ emit_group_store (orig_dst, src, ssize, align)
the temporary. */
temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
emit_group_store (temp, src, ssize, align);
emit_group_load (dst, temp, ssize, align);
emit_group_store (temp, src, ssize);
emit_group_load (dst, temp, ssize);
return;
}
else if (GET_CODE (dst) != MEM)
@ -2141,13 +2136,13 @@ emit_group_store (orig_dst, src, ssize, align)
/* Optimize the access just a bit. */
if (GET_CODE (dst) == MEM
&& align >= GET_MODE_ALIGNMENT (mode)
&& MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
&& bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
&& bytelen == GET_MODE_SIZE (mode))
emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
else
store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
mode, tmps[i], align, ssize);
mode, tmps[i], ssize);
}
emit_queue ();
@ -2228,8 +2223,8 @@ copy_blkmode_from_reg (tgtblk, srcreg, type)
extract_bit_field (src, bitsize,
xbitpos % BITS_PER_WORD, 1,
NULL_RTX, word_mode, word_mode,
bitsize, BITS_PER_WORD),
bitsize, BITS_PER_WORD);
BITS_PER_WORD),
BITS_PER_WORD);
}
return tgtblk;
@ -3655,7 +3650,7 @@ emit_push_insn (x, mode, type, size, align, partial, reg, extra,
/* Handle calls that pass values in multiple non-contiguous locations.
The Irix 6 ABI has examples of this. */
if (GET_CODE (reg) == PARALLEL)
emit_group_load (reg, x, -1, align); /* ??? size? */
emit_group_load (reg, x, -1); /* ??? size? */
else
move_block_to_reg (REGNO (reg), x, partial, mode);
}
@ -3872,9 +3867,7 @@ expand_assignment (to, from, want_value, suggest_reg)
? ((enum machine_mode)
TYPE_MODE (TREE_TYPE (to)))
: VOIDmode),
unsignedp,
alignment,
int_size_in_bytes (TREE_TYPE (tem)),
unsignedp, int_size_in_bytes (TREE_TYPE (tem)),
get_alias_set (to));
preserve_temp_slots (result);
@ -3916,8 +3909,7 @@ expand_assignment (to, from, want_value, suggest_reg)
/* Handle calls that return values in multiple non-contiguous locations.
The Irix 6 ABI has examples of this. */
if (GET_CODE (to_rtx) == PARALLEL)
emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
TYPE_ALIGN (TREE_TYPE (from)));
emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
else if (GET_MODE (to_rtx) == BLKmode)
emit_block_move (to_rtx, value, expr_size (from));
else
@ -3951,8 +3943,7 @@ expand_assignment (to, from, want_value, suggest_reg)
temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
if (GET_CODE (to_rtx) == PARALLEL)
emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
TYPE_ALIGN (TREE_TYPE (from)));
emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
else
emit_move_insn (to_rtx, temp);
@ -4362,8 +4353,7 @@ store_expr (exp, target, want_value)
/* Handle calls that return values in multiple non-contiguous locations.
The Irix 6 ABI has examples of this. */
else if (GET_CODE (target) == PARALLEL)
emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
TYPE_ALIGN (TREE_TYPE (exp)));
emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
else if (GET_MODE (temp) == BLKmode)
emit_block_move (target, temp, expr_size (exp));
else
@ -4464,7 +4454,7 @@ mostly_zeros_p (exp)
/* Helper function for store_constructor.
TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
TYPE is the type of the CONSTRUCTOR, not the element type.
ALIGN and CLEARED are as for store_constructor.
CLEARED is as for store_constructor.
ALIAS_SET is the alias set to use for any stores.
This provides a recursive shortcut back to store_constructor when it isn't
@ -4473,14 +4463,13 @@ mostly_zeros_p (exp)
clear a substructure if the outer structure has already been cleared. */
static void
store_constructor_field (target, bitsize, bitpos,
mode, exp, type, align, cleared, alias_set)
store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
alias_set)
rtx target;
unsigned HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
enum machine_mode mode;
tree exp, type;
unsigned int align;
int cleared;
int alias_set;
{
@ -4500,11 +4489,7 @@ store_constructor_field (target, bitsize, bitpos,
? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
/* Show the alignment may no longer be what it was and update the alias
set, if required. */
if (bitpos != 0)
align = MIN (align, (unsigned int) bitpos & - bitpos);
/* Update the alias set, if required. */
if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
&& MEM_ALIAS_SET (target) != 0)
{
@ -4512,26 +4497,25 @@ store_constructor_field (target, bitsize, bitpos,
set_mem_alias_set (target, alias_set);
}
store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
}
else
store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
int_size_in_bytes (type), alias_set);
}
/* Store the value of constructor EXP into the rtx TARGET.
TARGET is either a REG or a MEM.
ALIGN is the maximum known alignment for TARGET.
TARGET is either a REG or a MEM; we know it cannot conflict, since
safe_from_p has been called.
CLEARED is true if TARGET is known to have been zero'd.
SIZE is the number of bytes of TARGET we are allowed to modify: this
may not be the same as the size of EXP if we are assigning to a field
which has been packed to exclude padding bits. */
static void
store_constructor (exp, target, align, cleared, size)
store_constructor (exp, target, cleared, size)
tree exp;
rtx target;
unsigned int align;
int cleared;
HOST_WIDE_INT size;
{
@ -4540,47 +4524,30 @@ store_constructor (exp, target, align, cleared, size)
HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif
/* We know our target cannot conflict, since safe_from_p has been called. */
#if 0
/* Don't try copying piece by piece into a hard register
since that is vulnerable to being clobbered by EXP.
Instead, construct in a pseudo register and then copy it all. */
if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
{
rtx temp = gen_reg_rtx (GET_MODE (target));
store_constructor (exp, temp, align, cleared, size);
emit_move_insn (target, temp);
return;
}
#endif
if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
|| TREE_CODE (type) == QUAL_UNION_TYPE)
{
tree elt;
/* Inform later passes that the whole union value is dead. */
/* We either clear the aggregate or indicate the value is dead. */
if ((TREE_CODE (type) == UNION_TYPE
|| TREE_CODE (type) == QUAL_UNION_TYPE)
&& ! cleared)
&& ! cleared
&& ! CONSTRUCTOR_ELTS (exp))
/* If the constructor is empty, clear the union. */
{
emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* If the constructor is empty, clear the union. */
if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
clear_storage (target, expr_size (exp));
clear_storage (target, expr_size (exp));
cleared = 1;
}
/* If we are building a static constructor into a register,
set the initial value as zero so we can fold the value into
a constant. But if more than one register is involved,
this probably loses. */
else if (GET_CODE (target) == REG && TREE_STATIC (exp)
else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
&& GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
{
if (! cleared)
emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
cleared = 1;
}
@ -4589,20 +4556,19 @@ store_constructor (exp, target, align, cleared, size)
clear the whole structure first. Don't do this if TARGET is a
register whose mode size isn't equal to SIZE since clear_storage
can't handle this case. */
else if (size > 0
else if (! cleared && size > 0
&& ((list_length (CONSTRUCTOR_ELTS (exp))
!= fields_length (type))
|| mostly_zeros_p (exp))
&& (GET_CODE (target) != REG
|| (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
|| ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
== size)))
{
if (! cleared)
clear_storage (target, GEN_INT (size));
clear_storage (target, GEN_INT (size));
cleared = 1;
}
else if (! cleared)
/* Inform later passes that the old value is dead. */
if (! cleared)
emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
/* Store each element of the constructor into
@ -4672,8 +4638,6 @@ store_constructor (exp, target, align, cleared, size)
to_rtx = offset_address (to_rtx, offset_rtx,
highest_pow2_factor (offset));
align = DECL_OFFSET_ALIGN (field);
}
if (TREE_READONLY (field))
@ -4698,11 +4662,13 @@ store_constructor (exp, target, align, cleared, size)
&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
{
tree type = TREE_TYPE (value);
if (TYPE_PRECISION (type) < BITS_PER_WORD)
{
type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
value = convert (type, value);
}
if (BYTES_BIG_ENDIAN)
value
= fold (build (LSHIFT_EXPR, type, value,
@ -4720,7 +4686,7 @@ store_constructor (exp, target, align, cleared, size)
}
store_constructor_field (to_rtx, bitsize, bitpos, mode,
TREE_VALUE (elt), type, align, cleared,
TREE_VALUE (elt), type, cleared,
get_alias_set (TREE_TYPE (field)));
}
}
@ -4817,7 +4783,6 @@ store_constructor (exp, target, align, cleared, size)
HOST_WIDE_INT bitpos;
int unsignedp;
tree value = TREE_VALUE (elt);
unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
tree index = TREE_PURPOSE (elt);
rtx xtarget = target;
@ -4869,8 +4834,8 @@ store_constructor (exp, target, align, cleared, size)
}
store_constructor_field
(target, bitsize, bitpos, mode, value, type, align,
cleared, get_alias_set (elttype));
(target, bitsize, bitpos, mode, value, type, cleared,
get_alias_set (elttype));
}
}
else
@ -4912,7 +4877,7 @@ store_constructor (exp, target, align, cleared, size)
highest_pow2_factor (position));
xtarget = adjust_address (xtarget, mode, 0);
if (TREE_CODE (value) == CONSTRUCTOR)
store_constructor (value, xtarget, align, cleared,
store_constructor (value, xtarget, cleared,
bitsize / BITS_PER_UNIT);
else
store_expr (value, xtarget, 0);
@ -4966,8 +4931,7 @@ store_constructor (exp, target, align, cleared, size)
}
store_constructor_field (target, bitsize, bitpos, mode, value,
type, align, cleared,
get_alias_set (elttype));
type, cleared, get_alias_set (elttype));
}
}
@ -5167,7 +5131,6 @@ store_constructor (exp, target, align, cleared, size)
has mode VALUE_MODE if that is convenient to do.
In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
ALIGN is the alignment that TARGET is known to have.
TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
ALIAS_SET is the alias set for the destination. This value will
@ -5175,8 +5138,8 @@ store_constructor (exp, target, align, cleared, size)
reference to the containing structure. */
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
unsignedp, align, total_size, alias_set)
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp,
total_size, alias_set)
rtx target;
HOST_WIDE_INT bitsize;
HOST_WIDE_INT bitpos;
@ -5184,7 +5147,6 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
tree exp;
enum machine_mode value_mode;
int unsignedp;
unsigned int align;
HOST_WIDE_INT total_size;
int alias_set;
{
@ -5229,7 +5191,7 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
emit_move_insn (object, target);
store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
align, total_size, alias_set);
total_size, alias_set);
/* Even though we aren't returning target, we need to
give it the updated value. */
@ -5259,11 +5221,11 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
|| GET_CODE (target) == SUBREG
/* If the field isn't aligned enough to store as an ordinary memref,
store it as a bit field. */
|| (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
&& (align < GET_MODE_ALIGNMENT (mode)
|| (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
&& (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
|| bitpos % GET_MODE_ALIGNMENT (mode)))
|| (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
&& (TYPE_ALIGN (TREE_TYPE (exp)) > align
|| (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
&& (TYPE_ALIGN (TREE_TYPE (exp)) > MEM_ALIGN (target)
|| bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
/* If the RHS and field are a constant size and the size of the
RHS isn't the same size as the bitfield, we must use bitfield
@ -5297,21 +5259,11 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
boundary. If so, we simply do a block copy. */
if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
{
unsigned int exp_align = expr_align (exp);
if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
|| bitpos % BITS_PER_UNIT != 0)
abort ();
target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
/* Make sure that ALIGN is no stricter than the alignment of EXP. */
align = MIN (exp_align, align);
/* Find an alignment that is consistent with the bit position. */
while ((bitpos % align) != 0)
align >>= 1;
emit_block_move (target, temp,
bitsize == -1 ? expr_size (exp)
: GEN_INT ((bitsize + BITS_PER_UNIT - 1)
@ -5321,11 +5273,11 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
}
/* Store the value in the bitfield. */
store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
store_bit_field (target, bitsize, bitpos, mode, temp, total_size);
if (value_mode != VOIDmode)
{
/* The caller wants an rtx for the value. */
/* If possible, avoid refetching from the bitfield itself. */
/* The caller wants an rtx for the value.
If possible, avoid refetching from the bitfield itself. */
if (width_mask != 0
&& ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
{
@ -5340,6 +5292,7 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
GET_MODE (temp) == VOIDmode
? value_mode
: GET_MODE (temp))), NULL_RTX);
tmode = GET_MODE (temp);
if (tmode == VOIDmode)
tmode = value_mode;
@ -5347,8 +5300,9 @@ store_field (target, bitsize, bitpos, mode, exp, value_mode,
temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
}
return extract_bit_field (target, bitsize, bitpos, unsignedp,
NULL_RTX, value_mode, 0, align,
NULL_RTX, value_mode, VOIDmode,
total_size);
}
return const0_rtx;
@ -6823,7 +6777,7 @@ expand_expr (exp, target, tmode, modifier)
* TYPE_QUAL_CONST))),
TREE_ADDRESSABLE (exp), 1, 1);
store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
store_constructor (exp, target, 0,
int_size_in_bytes (TREE_TYPE (exp)));
return target;
}
@ -7256,11 +7210,10 @@ expand_expr (exp, target, tmode, modifier)
op0 = validize_mem (op0);
if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
mark_reg_pointer (XEXP (op0, 0), alignment);
mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
op0 = extract_bit_field (op0, bitsize, bitpos,
unsignedp, target, ext_mode, ext_mode,
alignment,
int_size_in_bytes (TREE_TYPE (tem)));
/* If the result is a record type and BITSIZE is narrower than
@ -7548,8 +7501,7 @@ expand_expr (exp, target, tmode, modifier)
* BITS_PER_UNIT),
(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
VOIDmode, 0, BITS_PER_UNIT,
int_size_in_bytes (type), 0);
VOIDmode, 0, int_size_in_bytes (type), 0);
else
abort ();
@ -8740,9 +8692,7 @@ expand_expr (exp, target, tmode, modifier)
if (GET_CODE (op0) == PARALLEL)
/* Handle calls that pass values in multiple non-contiguous
locations. The Irix 6 ABI has examples of this. */
emit_group_store (memloc, op0,
int_size_in_bytes (inner_type),
TYPE_ALIGN (inner_type));
emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
else
emit_move_insn (memloc, op0);
op0 = memloc;
@ -9215,7 +9165,7 @@ expand_expr_unaligned (exp, palign)
op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
unsignedp, NULL_RTX, ext_mode,
ext_mode, alignment,
ext_mode,
int_size_in_bytes (TREE_TYPE (tem)));
/* If the result is a record type and BITSIZE is narrower than

View File

@ -423,11 +423,11 @@ extern void move_block_from_reg PARAMS ((int, rtx, int, int));
/* Load a BLKmode value into non-consecutive registers represented by a
PARALLEL. */
extern void emit_group_load PARAMS ((rtx, rtx, int, unsigned int));
extern void emit_group_load PARAMS ((rtx, rtx, int));
/* Store a BLKmode value from non-consecutive registers represented by a
PARALLEL. */
extern void emit_group_store PARAMS ((rtx, rtx, int, unsigned int));
extern void emit_group_store PARAMS ((rtx, rtx, int));
#ifdef TREE_CODE
/* Copy BLKmode object from a set of registers. */
@ -746,12 +746,11 @@ mode_for_extraction PARAMS ((enum extraction_pattern, int));
extern rtx store_bit_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
enum machine_mode, rtx,
unsigned int, HOST_WIDE_INT));
enum machine_mode, rtx, HOST_WIDE_INT));
extern rtx extract_bit_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT, int, rtx,
enum machine_mode, enum machine_mode,
unsigned int, HOST_WIDE_INT));
HOST_WIDE_INT));
extern rtx expand_mult PARAMS ((enum machine_mode, rtx, rtx, rtx, int));
extern rtx expand_mult_add PARAMS ((rtx, rtx, rtx, rtx,enum machine_mode, int));
extern rtx expand_mult_highpart_adjust PARAMS ((enum machine_mode, rtx, rtx, rtx, rtx, int));

View File

@ -3117,8 +3117,7 @@ purge_addressof_1 (loc, insn, force, store, ht)
start_sequence ();
store_bit_field (sub, size_x, 0, GET_MODE (x),
val, GET_MODE_SIZE (GET_MODE (sub)),
GET_MODE_ALIGNMENT (GET_MODE (sub)));
val, GET_MODE_SIZE (GET_MODE (sub)));
/* Make sure to unshare any shared rtl that store_bit_field
might have created. */
@ -3139,7 +3138,6 @@ purge_addressof_1 (loc, insn, force, store, ht)
start_sequence ();
val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
GET_MODE (x), GET_MODE (x),
GET_MODE_SIZE (GET_MODE (sub)),
GET_MODE_SIZE (GET_MODE (sub)));
if (! validate_change (insn, loc, val, 0))
@ -4493,8 +4491,7 @@ assign_parms (fndecl)
locations. The Irix 6 ABI has examples of this. */
if (GET_CODE (entry_parm) == PARALLEL)
emit_group_store (validize_mem (stack_parm), entry_parm,
int_size_in_bytes (TREE_TYPE (parm)),
TYPE_ALIGN (TREE_TYPE (parm)));
int_size_in_bytes (TREE_TYPE (parm)));
else
move_block_from_reg (REGNO (entry_parm),
@ -4635,8 +4632,7 @@ assign_parms (fndecl)
locations. The Irix 6 ABI has examples of this. */
if (GET_CODE (entry_parm) == PARALLEL)
emit_group_store (validize_mem (stack_parm), entry_parm,
int_size_in_bytes (TREE_TYPE (parm)),
TYPE_ALIGN (TREE_TYPE (parm)));
int_size_in_bytes (TREE_TYPE (parm)));
else
move_block_from_reg (REGNO (entry_parm),
validize_mem (stack_parm),
@ -6909,8 +6905,7 @@ expand_function_end (filename, line, end_bindings)
}
else if (GET_CODE (real_decl_rtl) == PARALLEL)
emit_group_load (real_decl_rtl, decl_rtl,
int_size_in_bytes (TREE_TYPE (decl_result)),
TYPE_ALIGN (TREE_TYPE (decl_result)));
int_size_in_bytes (TREE_TYPE (decl_result)));
else
emit_move_insn (real_decl_rtl, decl_rtl);

View File

@ -580,8 +580,7 @@ noce_emit_move_insn (x, y)
outmode = GET_MODE (outer);
inmode = GET_MODE (inner);
bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
store_bit_field (inner, GET_MODE_BITSIZE (outmode),
bitpos, outmode, y, GET_MODE_BITSIZE (inmode),
store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y,
GET_MODE_BITSIZE (inmode));
}

View File

@ -3071,8 +3071,7 @@ expand_value_return (val)
val = convert_modes (mode, old_mode, val, unsignedp);
#endif
if (GET_CODE (return_reg) == PARALLEL)
emit_group_load (return_reg, val, int_size_in_bytes (type),
TYPE_ALIGN (type));
emit_group_load (return_reg, val, int_size_in_bytes (type));
else
emit_move_insn (return_reg, val);
}
@ -3253,8 +3252,8 @@ expand_return (retval)
extract_bit_field (src, bitsize,
bitpos % BITS_PER_WORD, 1,
NULL_RTX, word_mode, word_mode,
bitsize, BITS_PER_WORD),
bitsize, BITS_PER_WORD);
BITS_PER_WORD),
BITS_PER_WORD);
}
/* Find the smallest integer mode large enough to hold the