emit-rtl.c (last_call_insn, add_function_usage_to): New functions.

	* emit-rtl.c (last_call_insn, add_function_usage_to): New functions.
	* rtl.h (last_call_insn, add_function_usage_to): New prototypes.
	* builtins.c (expand_builtin_apply): Use the new emit-rtl functions.
	* calls.c (emit_call_1): Likewise.
	(expand_call): For calls initializing constant memory, replace
	emission of standalone mem /u clobber with function usage entry.
	* expr.c (emit_block_move_via_libcall): Likewise.
	* cse.c (count_reg_usage, case EXPR_LIST): New case.
	* flow.c (propagate_one_insn): Pass entire operand of
	CALL_INSN_FUNCTION_USAGE to mark_used_regs.
	* integrate.c (try_constants): For CALL_INSNs, substitute constants
	within the FUNCTION_USAGE also.
	* loop.c (prescan_loop): Note clobbers of const mem mentioned in
	FUNCTION_USAGE lists.
	* reload1.c (replace_pseudos_in): Renamed.
	(reload): Use it for clobbers surviving until the end of the reload.

From-SVN: r66429
commit ee9609391b
parent 3923e4102f
gcc/ChangeLog
@@ -1,3 +1,22 @@
+2003-05-03  Olivier Hainque  <hainque@act-europe.fr>
+
+	* emit-rtl.c (last_call_insn, add_function_usage_to): New functions.
+	* rtl.h (last_call_insn, add_function_usage_to): New prototypes.
+	* builtins.c (expand_builtin_apply): Use the new emit-rtl functions.
+	* calls.c (emit_call_1): Likewise.
+	(expand_call): For calls initializing constant memory, replace
+	emission of standalone mem /u clobber with function usage entry.
+	* expr.c (emit_block_move_via_libcall): Likewise.
+	* cse.c (count_reg_usage, case EXPR_LIST): New case.
+	* flow.c (propagate_one_insn): Pass entire operand of
+	CALL_INSN_FUNCTION_USAGE to mark_used_regs.
+	* integrate.c (try_constants): For CALL_INSNs, substitute constants
+	within the FUNCTION_USAGE also.
+	* loop.c (prescan_loop): Note clobbers of const mem mentioned in
+	FUNCTION_USAGE lists.
+	* reload1.c (replace_pseudos_in): Renamed.
+	(reload): Use it for clobbers surviving until the end of the reload.
+
 2003-05-03  Richard Kenner  <kenner@vlsi1.ultra.nyu.edu>
 
 	* stor-layout.c (place_field): When adjusting offset_align, use
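The pivot of this patch is CALL_INSN_FUNCTION_USAGE: an EXPR_LIST chain hanging off a CALL_INSN whose entries are (use ...) and (clobber ...) expressions describing what the call reads and writes beyond its visible pattern. The standalone sketch below models that chain with invented types (expr_list and cons_usage are illustrative stand-ins, not GCC's rtl):

/* Standalone model of a CALL_INSN_FUNCTION_USAGE chain; types and names
   are illustrative, not GCC's.  Each link is an EXPR_LIST whose
   XEXP (link, 0) is a USE or CLOBBER and whose XEXP (link, 1) is the
   next link.  */
#include <stdio.h>
#include <stdlib.h>

enum usage_code { USE, CLOBBER };

struct expr_list
{
  enum usage_code code;    /* what the entry asserts about its operand */
  const char *operand;     /* stand-in for the register or memory rtx  */
  struct expr_list *next;  /* XEXP (link, 1) in real rtl               */
};

static struct expr_list *
cons_usage (enum usage_code code, const char *operand, struct expr_list *next)
{
  struct expr_list *e = malloc (sizeof *e);
  e->code = code;
  e->operand = operand;
  e->next = next;
  return e;
}

int
main (void)
{
  /* A call that reads two argument registers and, because it initializes
     readonly memory, also carries a CLOBBER of that mem, as expand_call
     now records.  */
  struct expr_list *fusage
    = cons_usage (USE, "(reg:SI 4)",
        cons_usage (USE, "(reg:SI 5)",
          cons_usage (CLOBBER, "(mem/u:BLK (reg:SI 116))", NULL)));

  for (struct expr_list *l = fusage; l; l = l->next)
    printf ("(expr_list (%s %s))\n",
            l->code == USE ? "use" : "clobber", l->operand);
  return 0;
}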
gcc/builtins.c
@@ -1323,29 +1323,10 @@ expand_builtin_apply (function, arguments, argsize)
 #endif
     abort ();
 
-  /* Find the CALL insn we just emitted.  */
-  for (call_insn = get_last_insn ();
-       call_insn && GET_CODE (call_insn) != CALL_INSN;
-       call_insn = PREV_INSN (call_insn))
-    ;
-
-  if (! call_insn)
-    abort ();
-
-  /* Put the register usage information on the CALL.  If there is already
-     some usage information, put ours at the end.  */
-  if (CALL_INSN_FUNCTION_USAGE (call_insn))
-    {
-      rtx link;
-
-      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
-	   link = XEXP (link, 1))
-	;
-
-      XEXP (link, 1) = call_fusage;
-    }
-  else
-    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
+  /* Find the CALL insn we just emitted, and attach the register usage
+     information.  */
+  call_insn = last_call_insn ();
+  add_function_usage_to (call_insn, call_fusage);
 
   /* Restore the stack.  */
 #ifdef HAVE_save_stack_nonlocal
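last_call_insn centralizes the backward scan that this caller (and emit_call_1 below) used to open-code; note the open-coded version aborted itself when no call was found, while the new function returns 0 and leaves the abort to add_function_usage_to. A minimal standalone model of the scan, with a mock insn type standing in for rtx:

/* Standalone sketch of the scan last_call_insn factors out; the insn
   type here is a mock, not GCC's.  Walk PREV_INSN links from the last
   emitted insn until a CALL_INSN (or the list head) is reached.  */
#include <stdio.h>

enum insn_code { NOTE, INSN, CALL_INSN };

struct insn
{
  enum insn_code code;
  struct insn *prev;  /* PREV_INSN in real rtl */
};

static struct insn *
find_last_call (struct insn *last)
{
  struct insn *i;

  for (i = last; i && i->code != CALL_INSN; i = i->prev)
    ;
  return i;  /* NULL when no call was emitted */
}

int
main (void)
{
  struct insn a = { CALL_INSN, NULL }, b = { INSN, &a }, c = { NOTE, &b };

  printf ("found call: %s\n", find_last_call (&c) == &a ? "yes" : "no");
  return 0;
}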
gcc/calls.c
@@ -536,14 +536,8 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
 #endif
     abort ();
 
-  /* Find the CALL insn we just emitted.  */
-  for (call_insn = get_last_insn ();
-       call_insn && GET_CODE (call_insn) != CALL_INSN;
-       call_insn = PREV_INSN (call_insn))
-    ;
-
-  if (! call_insn)
-    abort ();
+  /* Find the call we just emitted.  */
+  call_insn = last_call_insn ();
 
   /* Mark memory as used for "pure" function call.  */
   if (ecf_flags & ECF_PURE)
@@ -554,20 +548,8 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
 			    gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
 	 call_fusage);
 
-  /* Put the register usage information on the CALL.  If there is already
-     some usage information, put ours at the end.  */
-  if (CALL_INSN_FUNCTION_USAGE (call_insn))
-    {
-      rtx link;
-
-      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
-	   link = XEXP (link, 1))
-	;
-
-      XEXP (link, 1) = call_fusage;
-    }
-  else
-    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
+  /* Put the register usage information there.  */
+  add_function_usage_to (call_insn, call_fusage);
 
   /* If this is a const call, then set the insn's unchanging bit.  */
   if (ecf_flags & (ECF_CONST | ECF_PURE))
@@ -3166,14 +3148,6 @@ expand_call (exp, target, ignore)
   if (flags & ECF_LONGJMP)
     current_function_calls_longjmp = 1;
 
-  /* If this function is returning into a memory location marked as
-     readonly, it means it is initializing that location. But we normally
-     treat functions as not clobbering such locations, so we need to
-     specify that this one does.  */
-  if (target != 0 && GET_CODE (target) == MEM
-      && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
-    emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
-
   /* If value type not void, return an rtx for the value.  */
 
   /* If there are cleanups to be called, don't use a hard reg as target.
@@ -3355,6 +3329,22 @@ expand_call (exp, target, ignore)
 	  expand_end_target_temps ();
 	}
 
+      /* If this function is returning into a memory location marked as
+	 readonly, it means it is initializing that location.  We normally treat
+	 functions as not clobbering such locations, so we need to specify that
+	 this one does.  We do this by adding the appropriate CLOBBER to the
+	 CALL_INSN function usage list.  This cannot be done by emitting a
+	 standalone CLOBBER after the call because the latter would be ignored
+	 by at least the delay slot scheduling pass.  We do this now instead of
+	 adding to call_fusage before the call to emit_call_1 because TARGET
+	 may be modified in the meantime.  */
+      if (structure_value_addr != 0 && target != 0
+	  && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
+	add_function_usage_to
+	  (last_call_insn (),
+	   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
+			      NULL_RTX));
+
       insns = get_insns ();
       end_sequence ();
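The expand_call hunk attaches the CLOBBER of TARGET to the call insn itself, where every pass that inspects the call will see it, instead of emitting it as a separate insn that a pass such as delay slot scheduling may drop or ignore. A toy end-to-end version of that step follows; all types and names here are invented for illustration, not GCC's:

/* Toy version of the new expand_call step: after the call has been
   emitted, wrap a CLOBBER of the return slot in a one-entry list and
   splice it onto the call's usage chain.  */
#include <stdio.h>

struct usage { const char *text; struct usage *next; };

/* Mimics add_function_usage_to: append EXTRA at the tail so existing
   argument USEs stay in front.  */
static void
attach_usage (struct usage **head, struct usage *extra)
{
  while (*head)
    head = &(*head)->next;
  *head = extra;
}

int
main (void)
{
  struct usage use_arg = { "(use (reg:SI 4))", NULL };
  struct usage *call_usage = &use_arg;   /* set up before the call */
  struct usage clobber = { "(clobber (mem/u:BLK (reg:SI 116)))", NULL };

  attach_usage (&call_usage, &clobber);  /* the new post-call step */

  for (struct usage *u = call_usage; u; u = u->next)
    printf ("%s\n", u->text);
  return 0;
}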
gcc/cse.c
@@ -7515,6 +7515,17 @@ count_reg_usage (x, counts, dest, incr)
 	count_reg_usage (XEXP (note, 0), counts, NULL_RTX, incr);
       return;
 
+    case EXPR_LIST:
+      if (REG_NOTE_KIND (x) == REG_EQUAL
+	  || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
+	  /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
+	     involving registers in the address.  */
+	  || GET_CODE (XEXP (x, 0)) == CLOBBER)
+	count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
+
+      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
+      return;
+
     case INSN_LIST:
      abort ();
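The new EXPR_LIST case exists because a usage list may now carry (clobber (mem/u ...)) entries, and the registers feeding the mem's address are genuine uses: if they are not counted, the insns computing them look dead. A standalone toy of that counting rule, glossing over the detail that a clobbered register (as opposed to a clobbered mem) is not a use; mock types, not GCC's:

/* Count register uses, descending into the address of a clobbered mem. */
#include <stdio.h>

enum code { REG, MEM, CLOBBER_X };

struct expr
{
  enum code code;
  int regno;        /* valid for REG */
  struct expr *op;  /* operand for MEM and CLOBBER_X */
};

static void
count_reg_uses (struct expr *x, int *counts)
{
  switch (x->code)
    {
    case REG:
      counts[x->regno]++;
      break;
    case MEM:        /* registers in the address are real uses ...     */
    case CLOBBER_X:  /* ... even when the mem is merely clobbered      */
      count_reg_uses (x->op, counts);
      break;
    }
}

int
main (void)
{
  int counts[8] = { 0 };
  struct expr reg = { REG, 5, NULL };
  struct expr mem = { MEM, 0, &reg };
  struct expr clob = { CLOBBER_X, 0, &mem };

  count_reg_uses (&clob, counts);
  printf ("reg 5 used %d time(s)\n", counts[5]);
  return 0;
}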
gcc/emit-rtl.c
@@ -3191,6 +3191,22 @@ prev_real_insn (insn)
   return insn;
 }
 
+/* Return the last CALL_INSN in the current list, or 0 if there is none.
+   This routine does not look inside SEQUENCEs.  */
+
+rtx
+last_call_insn ()
+{
+  rtx insn;
+
+  for (insn = get_last_insn ();
+       insn && GET_CODE (insn) != CALL_INSN;
+       insn = PREV_INSN (insn))
+    ;
+
+  return insn;
+}
+
 /* Find the next insn after INSN that really does something.  This routine
    does not look inside SEQUENCEs.  Until reload has completed, this is the
    same as next_real_insn.  */
@@ -3850,6 +3866,31 @@ remove_insn (insn)
     }
 }
 
+/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
+
+void
+add_function_usage_to (call_insn, call_fusage)
+     rtx call_insn, call_fusage;
+{
+  if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
+    abort ();
+
+  /* Put the register usage information on the CALL.  If there is already
+     some usage information, put ours at the end.  */
+  if (CALL_INSN_FUNCTION_USAGE (call_insn))
+    {
+      rtx link;
+
+      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
+	   link = XEXP (link, 1))
+	;
+
+      XEXP (link, 1) = call_fusage;
+    }
+  else
+    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
+}
+
 /* Delete all insns made since FROM.
    FROM becomes the new last instruction.  */
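add_function_usage_to has two branches: append at the tail when usage info already exists, otherwise install the new list wholesale, so earlier entries (typically the argument USEs) keep their position. A standalone walk-through of both branches with a mock list type in place of rtl:

/* Both branches of the append logic, exercised in emission order. */
#include <assert.h>
#include <stdio.h>

struct link { const char *s; struct link *next; };

static void
add_usage (struct link **fusage_slot, struct link *extra)
{
  if (*fusage_slot)
    {
      struct link *l = *fusage_slot;
      while (l->next)      /* find XEXP (link, 1) == 0 */
        l = l->next;
      l->next = extra;     /* XEXP (link, 1) = call_fusage */
    }
  else
    *fusage_slot = extra;  /* CALL_INSN_FUNCTION_USAGE = call_fusage */
}

int
main (void)
{
  struct link a = { "(use (reg 4))", NULL }, b = { "(use (reg 5))", NULL };
  struct link *fusage = NULL;

  add_usage (&fusage, &a);  /* empty-list branch */
  add_usage (&fusage, &b);  /* append-at-tail branch */
  assert (fusage == &a && a.next == &b);
  puts ("entries kept in emission order");
  return 0;
}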
gcc/expr.c
@@ -1839,16 +1839,16 @@ emit_block_move_via_movstr (x, y, size, align)
      rtx x, y, size;
      unsigned int align;
 {
-  /* Try the most limited insn first, because there's no point
-     including more than one in the machine description unless
-     the more limited one has some advantage.  */
-
   rtx opalign = GEN_INT (align / BITS_PER_UNIT);
   enum machine_mode mode;
 
   /* Since this is a move insn, we don't care about volatility.  */
   volatile_ok = 1;
 
+  /* Try the most limited insn first, because there's no point
+     including more than one in the machine description unless
+     the more limited one has some advantage.  */
+
   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
        mode = GET_MODE_WIDER_MODE (mode))
     {
@@ -1908,38 +1908,48 @@ static rtx
 emit_block_move_via_libcall (dst, src, size)
      rtx dst, src, size;
 {
+  rtx dst_addr, src_addr;
   tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
   enum machine_mode size_mode;
   rtx retval;
 
   /* DST, SRC, or SIZE may have been passed through protect_from_queue.
 
-     It is unsafe to save the value generated by protect_from_queue
-     and reuse it later.  Consider what happens if emit_queue is
-     called before the return value from protect_from_queue is used.
+     It is unsafe to save the value generated by protect_from_queue and reuse
+     it later.  Consider what happens if emit_queue is called before the
+     return value from protect_from_queue is used.
 
-     Expansion of the CALL_EXPR below will call emit_queue before
-     we are finished emitting RTL for argument setup.  So if we are
-     not careful we could get the wrong value for an argument.
+     Expansion of the CALL_EXPR below will call emit_queue before we are
+     finished emitting RTL for argument setup.  So if we are not careful we
+     could get the wrong value for an argument.
 
-     To avoid this problem we go ahead and emit code to copy X, Y &
-     SIZE into new pseudos.  We can then place those new pseudos
-     into an RTL_EXPR and use them later, even after a call to
+     To avoid this problem we go ahead and emit code to copy the addresses of
+     DST and SRC and SIZE into new pseudos.  We can then place those new
+     pseudos into an RTL_EXPR and use them later, even after a call to
      emit_queue.
 
-     Note this is not strictly needed for library calls since they
-     do not call emit_queue before loading their arguments.  However,
-     we may need to have library calls call emit_queue in the future
-     since failing to do so could cause problems for targets which
-     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
+     Note this is not strictly needed for library calls since they do not call
+     emit_queue before loading their arguments.  However, we may need to have
+     library calls call emit_queue in the future since failing to do so could
+     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
+     arguments in registers.  */
 
-  dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
-  src = copy_to_mode_reg (Pmode, XEXP (src, 0));
+  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
+  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
+
+#ifdef POINTERS_EXTEND_UNSIGNED
+  dst_addr = convert_memory_address (ptr_mode, dst_addr);
+  src_addr = convert_memory_address (ptr_mode, src_addr);
+#endif
+
+  dst_tree = make_tree (ptr_type_node, dst_addr);
+  src_tree = make_tree (ptr_type_node, src_addr);
 
   if (TARGET_MEM_FUNCTIONS)
     size_mode = TYPE_MODE (sizetype);
   else
     size_mode = TYPE_MODE (unsigned_type_node);
 
   size = convert_to_mode (size_mode, size, 1);
   size = copy_to_mode_reg (size_mode, size);
@@ -1951,8 +1961,6 @@ emit_block_move_via_libcall (dst, src, size)
 
      For convenience, we generate the call to bcopy this way as well.  */
 
-  dst_tree = make_tree (ptr_type_node, dst);
-  src_tree = make_tree (ptr_type_node, src);
   if (TARGET_MEM_FUNCTIONS)
     size_tree = make_tree (sizetype, size);
   else
@@ -1979,13 +1987,17 @@ emit_block_move_via_libcall (dst, src, size)
 
   retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
 
-  /* If we are initializing a readonly value, show the above call
-     clobbered it.  Otherwise, a load from it may erroneously be
-     hoisted from a loop.  */
+  /* If we are initializing a readonly value, show the above call clobbered
+     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
+     the delay slot scheduler might overlook conflicts and take nasty
+     decisions.  */
   if (RTX_UNCHANGING_P (dst))
-    emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
+    add_function_usage_to
+      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
+					     gen_rtx_CLOBBER (VOIDmode, dst),
+					     NULL_RTX));
 
-  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
+  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
 }
 
 /* A subroutine of emit_block_move_via_libcall.  Create the tree node
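The long comment in this function describes a use-after-invalidation hazard. A standalone toy of it, with invented names (get_addr and flush_queue stand in for protect_from_queue and emit_queue): a value handed out through shared storage must be snapshotted before the queue is flushed, which is what the copy_to_mode_reg calls above do for the DST/SRC addresses:

/* Toy model of the protect_from_queue hazard; not GCC code. */
#include <stdio.h>
#include <string.h>

static char scratch[32];

static const char *
get_addr (const char *s)   /* stand-in for protect_from_queue */
{
  strncpy (scratch, s, sizeof scratch - 1);
  scratch[sizeof scratch - 1] = '\0';
  return scratch;
}

static void
flush_queue (void)         /* stand-in for emit_queue */
{
  strcpy (scratch, "(overwritten)");
}

int
main (void)
{
  const char *unsafe = get_addr ("(reg:SI 116)");
  char safe[32];

  strcpy (safe, unsafe);   /* the copy_to_mode_reg snapshot */
  flush_queue ();          /* runs during CALL_EXPR expansion */
  printf ("unsafe: %s\nsafe:   %s\n", unsafe, safe);
  return 0;
}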
gcc/flow.c
@@ -1832,13 +1832,14 @@ propagate_one_insn (pbi, insn)
       if (GET_CODE (PATTERN (insn)) == COND_EXEC)
 	cond = COND_EXEC_TEST (PATTERN (insn));
 
-      /* Calls use their arguments.  */
+      /* Calls use their arguments, and may clobber memory which
+	 address involves some register.  */
       for (note = CALL_INSN_FUNCTION_USAGE (insn);
 	   note;
 	   note = XEXP (note, 1))
-	if (GET_CODE (XEXP (note, 0)) == USE)
-	  mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
-			  cond, insn);
+	/* We find USE or CLOBBER entities in a FUNCTION_USAGE list: both
+	   of which mark_used_regs knows how to handle.  */
+	mark_used_regs (pbi, XEXP (note, 0), cond, insn);
 
       /* The stack ptr is used (honorarily) by a CALL insn.  */
       SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
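With CLOBBER entries now present in usage lists, the old USE-only filter would leave the registers in a clobbered mem's address looking dead. A toy rendering of the fixed loop (mock types throughout; mark_used plays the role of mark_used_regs, which in GCC descends into the entry to find its registers):

/* Every usage entry, USE or CLOBBER, mentions a register to keep live. */
#include <stdio.h>

enum code { USE_X, CLOBBER_MEM };

struct entry { enum code code; int reg; struct entry *next; };

static void
mark_used (struct entry *e, unsigned *live)
{
  *live |= 1u << e->reg;
}

int
main (void)
{
  struct entry clob = { CLOBBER_MEM, 6, NULL };  /* (clobber (mem (reg 6))) */
  struct entry use = { USE_X, 4, &clob };        /* (use (reg 4)) */
  unsigned live = 0;

  for (struct entry *n = &use; n; n = n->next)
    mark_used (n, &live);                        /* no USE-only filter now */

  printf ("live mask: %#x\n", live);             /* reg 6 no longer dead */
  return 0;
}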
gcc/integrate.c
@@ -2463,6 +2463,14 @@ try_constants (insn, map)
   apply_change_group ();
   subst_constants (&PATTERN (insn), insn, map, 0);
   apply_change_group ();
 
+  /* Enforce consistency between the addresses in the regular insn flow
+     and the ones in CALL_INSN_FUNCTION_USAGE lists, if any.  */
+  if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
+    {
+      subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
+      apply_change_group ();
+    }
+
   /* Show we don't know the value of anything stored or clobbered.  */
   note_stores (PATTERN (insn), mark_stores, NULL);
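The point of the new try_constants step is that the same substitution map must hit both copies of an address: the one in the insn pattern and the one in the usage list. A deliberately tiny standalone model with invented names:

/* Apply one pseudo-to-constant map to both address copies. */
#include <stdio.h>

#define NREG 8

static int
subst (int operand, const int *map)
{
  return map[operand] ? map[operand] : operand;
}

int
main (void)
{
  int map[NREG] = { 0 };
  int pattern_addr = 5, fusage_addr = 5;   /* same pseudo in both places */

  map[5] = 1000;                           /* pseudo 5 folds to a constant */

  pattern_addr = subst (pattern_addr, map);
  fusage_addr = subst (fusage_addr, map);  /* the new try_constants step */

  printf ("pattern: %d  fusage: %d  consistent: %s\n",
          pattern_addr, fusage_addr,
          pattern_addr == fusage_addr ? "yes" : "no");
  return 0;
}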
gcc/loop.c
@@ -2575,6 +2575,30 @@ prescan_loop (loop)
 	  loop_info->has_call = 1;
 	  if (can_throw_internal (insn))
 	    loop_info->has_multiple_exit_targets = 1;
+
+	  /* Calls initializing constant objects have CLOBBER of MEM /u in the
+	     attached FUNCTION_USAGE expression list, not accounted for by the
+	     code above.  We should note these to avoid missing dependencies in
+	     later references.  */
+	  {
+	    rtx fusage_entry;
+
+	    for (fusage_entry = CALL_INSN_FUNCTION_USAGE (insn);
+		 fusage_entry; fusage_entry = XEXP (fusage_entry, 1))
+	      {
+		rtx fusage = XEXP (fusage_entry, 0);
+
+		if (GET_CODE (fusage) == CLOBBER
+		    && GET_CODE (XEXP (fusage, 0)) == MEM
+		    && RTX_UNCHANGING_P (XEXP (fusage, 0)))
+		  {
+		    note_stores (fusage, note_addr_stored, loop_info);
+		    if (! loop_info->first_loop_store_insn
+			&& loop_info->store_mems)
+		      loop_info->first_loop_store_insn = insn;
+		  }
+	      }
+	  }
 	  break;
 
 	case JUMP_INSN:
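A standalone sketch of the scan this hunk adds, with mock types: only CLOBBER entries whose mem is marked unchanging are treated as stores and fed to the loop bookkeeping, so later loads from that location are not hoisted past the call:

/* Filter a mock usage list for clobbers of unchanging memory. */
#include <stdio.h>

struct fentry
{
  int is_clobber;
  int mem_unchanging;  /* RTX_UNCHANGING_P on the mem in real rtl */
  const char *mem;
  struct fentry *next;
};

int
main (void)
{
  struct fentry use = { 0, 0, NULL, NULL };
  struct fentry clob = { 1, 1, "(mem/u:BLK (reg 116))", &use };
  int stores = 0;

  for (struct fentry *f = &clob; f; f = f->next)
    if (f->is_clobber && f->mem_unchanging)
      {
        printf ("noting store to %s\n", f->mem);
        stores++;  /* feeds loop_info->store_mems in the real pass */
      }

  printf ("%d store(s) noted\n", stores);
  return 0;
}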
gcc/reload1.c
@@ -369,9 +369,7 @@ static int (*offsets_at)[NUM_ELIMINABLE_REGS];
 
 static int num_labels;
 
-static void replace_pseudos_in_call_usage	PARAMS ((rtx *,
-							 enum machine_mode,
-							 rtx));
+static void replace_pseudos_in	PARAMS ((rtx *, enum machine_mode, rtx));
 static void maybe_fix_stack_asms	PARAMS ((void));
 static void copy_reloads		PARAMS ((struct insn_chain *));
 static void calculate_needs_all_insns	PARAMS ((int));
@@ -583,7 +581,7 @@ compute_use_by_pseudos (to, from)
    equivalences.  */
 
 static void
-replace_pseudos_in_call_usage (loc, mem_mode, usage)
+replace_pseudos_in (loc, mem_mode, usage)
      rtx *loc;
      enum machine_mode mem_mode;
      rtx usage;
@@ -608,7 +606,7 @@ replace_pseudos_in_call_usage (loc, mem_mode, usage)
       if (x != *loc)
 	{
 	  *loc = x;
-	  replace_pseudos_in_call_usage (loc, mem_mode, usage);
+	  replace_pseudos_in (loc, mem_mode, usage);
 	  return;
 	}
 
@@ -628,7 +626,7 @@ replace_pseudos_in_call_usage (loc, mem_mode, usage)
     }
   else if (code == MEM)
     {
-      replace_pseudos_in_call_usage (& XEXP (x, 0), GET_MODE (x), usage);
+      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
       return;
     }
 
@@ -636,10 +634,10 @@ replace_pseudos_in_call_usage (loc, mem_mode, usage)
   fmt = GET_RTX_FORMAT (code);
   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
     if (*fmt == 'e')
-      replace_pseudos_in_call_usage (&XEXP (x, i), mem_mode, usage);
+      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
     else if (*fmt == 'E')
       for (j = 0; j < XVECLEN (x, i); j++)
-	replace_pseudos_in_call_usage (& XVECEXP (x, i, j), mem_mode, usage);
+	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
 }
 
@@ -1192,9 +1190,8 @@ reload (first, global)
       rtx *pnote;
 
       if (GET_CODE (insn) == CALL_INSN)
-	replace_pseudos_in_call_usage (& CALL_INSN_FUNCTION_USAGE (insn),
-				       VOIDmode,
-				       CALL_INSN_FUNCTION_USAGE (insn));
+	replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
+			    VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
 
       if ((GET_CODE (PATTERN (insn)) == USE
 	   /* We mark with QImode USEs introduced by reload itself.  */
@@ -1213,6 +1210,13 @@ reload (first, global)
 	  continue;
 	}
 
+      /* Some CLOBBERs may survive until here and still reference unassigned
+	 pseudos with const equivalent, which may in turn cause ICE in later
+	 passes if the reference remains in place.  */
+      if (GET_CODE (PATTERN (insn)) == CLOBBER)
+	replace_pseudos_in (& XEXP (PATTERN (insn), 0),
+			    VOIDmode, PATTERN (insn));
+
       pnote = &REG_NOTES (insn);
       while (*pnote != 0)
 	{
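replace_pseudos_in walks arbitrary rtl by consulting GET_RTX_FORMAT, the per-code string that says which operand slots hold sub-expressions ('e') or vectors ('E'). A standalone model of that format-driven recursion, with mock node types and a mock format table (GCC's real tables and the *loc replacement of whole nodes are richer than this):

/* Format-driven recursive replacement over a mock expression tree. */
#include <stdio.h>
#include <string.h>

enum code { REG, PLUS, MEM_X };

struct node
{
  enum code code;
  int regno;           /* for REG */
  struct node *op[2];  /* sub-expressions */
};

static const char *const fmt[] = { "", "ee", "e" };  /* REG, PLUS, MEM_X */

static void
replace_reg (struct node **loc, int from, int to)
{
  struct node *x = *loc;

  if (x->code == REG)
    {
      if (x->regno == from)
        x->regno = to;   /* in GCC: substitute the chosen equivalent */
      return;
    }

  /* Recurse into every 'e' slot named by this code's format string.  */
  for (size_t i = 0; i < strlen (fmt[x->code]); i++)
    if (fmt[x->code][i] == 'e')
      replace_reg (&x->op[i], from, to);
}

int
main (void)
{
  struct node r1 = { REG, 101, { NULL, NULL } };
  struct node r2 = { REG, 7, { NULL, NULL } };
  struct node sum = { PLUS, 0, { &r1, &r2 } };
  struct node mem = { MEM_X, 0, { &sum, NULL } };
  struct node *root = &mem;

  replace_reg (&root, 101, 3);  /* pseudo 101 was assigned hard reg 3 */
  printf ("address now (plus (reg %d) (reg %d))\n",
          mem.op[0]->op[0]->regno, mem.op[0]->op[1]->regno);
  return 0;
}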
gcc/rtl.h
@@ -1538,6 +1538,8 @@ extern rtx emit_line_note PARAMS ((const char *, int));
 extern rtx emit_note		PARAMS ((const char *, int));
 extern rtx emit_line_note_force	PARAMS ((const char *, int));
 extern rtx make_insn_raw	PARAMS ((rtx));
+extern void add_function_usage_to	PARAMS ((rtx, rtx));
+extern rtx last_call_insn	PARAMS ((void));
 extern rtx previous_insn	PARAMS ((rtx));
 extern rtx next_insn		PARAMS ((rtx));
 extern rtx prev_nonnote_insn	PARAMS ((rtx));