Convert to md_asm_adjust

Using proper vectors instead of lists of trees.

From-SVN: r223025
Richard Henderson <rth@redhat.com>, 2015-05-11 16:33:23 -07:00 (committed by Richard Henderson)
parent 15a85b055d
commit 7ca35180e9
17 changed files with 451 additions and 455 deletions
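
The commit message's "proper vectors instead of lists of trees" refers to replacing TREE_CHAIN-linked lists built with tree_cons by GCC's vec<> containers, both in expand_asm_stmt and in the target hook it calls. The sketch below condenses the two idioms from the i386 hunk further down; the function names are placeholders and the snippet assumes GCC's internal headers (tree.h, rtl.h, vec.h, hard-reg-set.h), so it is illustrative rather than standalone-compilable.

/* Old interface: the hook receives and returns a tree list of
   STRING_CST clobber names, extended with tree_cons and walked by
   the expander with TREE_CHAIN.  */
static tree
old_style_md_asm_clobbers (tree /*outputs*/, tree /*inputs*/, tree clobbers)
{
  clobbers = tree_cons (NULL_TREE, build_string (5, "flags"), clobbers);
  clobbers = tree_cons (NULL_TREE, build_string (4, "fpsr"), clobbers);
  return clobbers;
}

/* New interface: the same clobbers are pushed as REG rtxes into a
   vec<rtx>, the clobbered hard registers are recorded directly, and
   the return value is an optional insn sequence to emit after the
   asm (NULL when nothing extra is needed).  */
static rtx_insn *
new_style_md_asm_adjust (vec<rtx> &/*outputs*/, vec<rtx> &/*inputs*/,
                         vec<const char *> &/*constraints*/,
                         vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs)
{
  clobbers.safe_push (gen_rtx_REG (CCmode, FLAGS_REG));
  clobbers.safe_push (gen_rtx_REG (CCFPmode, FPSR_REG));
  SET_HARD_REG_BIT (clobbered_regs, FLAGS_REG);
  SET_HARD_REG_BIT (clobbered_regs, FPSR_REG);
  return NULL;
}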

gcc/ChangeLog

@ -1,3 +1,47 @@
2015-05-11 Richard Henderson <rth@redhat.com>
* target.def (md_asm_clobbers): Replace with...
(md_asm_adjust): this.
* tm.texi.in (TARGET_MD_ASM_CLOBBERS): Remove.
(TARGET_MD_ASM_ADJUST): New.
* tm.texi: Rebuild.
* hooks.c (hook_tree_tree_tree_tree_3rd_identity): Remove.
* hooks.h (hook_tree_tree_tree_tree_3rd_identity): Remove.
* system.h (TARGET_MD_ASM_CLOBBERS): Poison.
* cfgexpand.c (check_operand_nalternatives): Accept vector of
constraints instead of lists of outputs and inputs.
(expand_asm_stmt): Save and restore input_location around the
body of the function. Move asm data into vectors instead of
building tree lists. Generate cleanup sequences as needed,
rather than waiting til the end. Use new md_asm_adjust hook.
* config/vxworks.c: Include vec.h before target.h.
* gimple.c: Likewise.
* incpath.c: Likewise.
* mode-switching.c: Likewise.
* config/cris/cris.c (cris_md_asm_clobbers): Convert to...
(cris_md_asm_adjust): this.
(TARGET_MD_ASM_CLOBBERS): Remove.
(TARGET_MD_ASM_ADJUST): New.
* config/i386/i386.c (ix86_md_asm_clobbers): Convert to...
(ix86_md_asm_adjust): this.
(TARGET_MD_ASM_CLOBBERS): Remove.
(TARGET_MD_ASM_ADJUST): New.
* config/mn10300/mn10300.c (mn10300_md_asm_clobbers): Convert to...
(mn10300_md_asm_adjust): this.
(TARGET_MD_ASM_CLOBBERS): Remove.
(TARGET_MD_ASM_ADJUST): New.
* config/rs6000/rs6000.c (rs6000_md_asm_clobbers): Convert to...
(rs6000_md_asm_adjust): this.
(TARGET_MD_ASM_CLOBBERS): Remove.
(TARGET_MD_ASM_ADJUST): New.
* config/visium/visium.c (visium_md_asm_clobbers): Convert to...
(visium_md_asm_adjust): this.
(TARGET_MD_ASM_CLOBBERS): Remove.
(TARGET_MD_ASM_ADJUST): New.
2015-05-11 Richard Henderson <rth@redhat.com>
* gimplify.c (gimplify_asm_expr): Set gimple_asm_volatile_p

gcc/cfgexpand.c

@ -2438,14 +2438,12 @@ n_occurrences (int c, const char *s)
the same number of alternatives. Return true if so. */
static bool
check_operand_nalternatives (tree outputs, tree inputs)
check_operand_nalternatives (const vec<const char *> &constraints)
{
if (outputs || inputs)
unsigned len = constraints.length();
if (len > 0)
{
tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
int nalternatives
= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
tree next = inputs;
int nalternatives = n_occurrences (',', constraints[0]);
if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
{
@ -2453,26 +2451,14 @@ check_operand_nalternatives (tree outputs, tree inputs)
return false;
}
tmp = outputs;
while (tmp)
{
const char *constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
if (n_occurrences (',', constraint) != nalternatives)
{
error ("operand constraints for %<asm%> differ "
"in number of alternatives");
return false;
}
if (TREE_CHAIN (tmp))
tmp = TREE_CHAIN (tmp);
else
tmp = next, next = 0;
}
for (unsigned i = 1; i < len; ++i)
if (n_occurrences (',', constraints[i]) != nalternatives)
{
error ("operand constraints for %<asm%> differ "
"in number of alternatives");
return false;
}
}
return true;
}
@ -2524,156 +2510,145 @@ tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
static void
expand_asm_stmt (gasm *stmt)
{
int noutputs, ninputs, nclobbers, nlabels, i;
tree string, outputs, inputs, clobbers, labels, tail, t;
location_t locus = gimple_location (stmt);
basic_block fallthru_bb = NULL;
/* Meh... convert the gimple asm operands into real tree lists.
Eventually we should make all routines work on the vectors instead
of relying on TREE_CHAIN. */
outputs = NULL_TREE;
noutputs = gimple_asm_noutputs (stmt);
if (noutputs > 0)
{
t = outputs = gimple_asm_output_op (stmt, 0);
for (i = 1; i < noutputs; i++)
t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
}
inputs = NULL_TREE;
ninputs = gimple_asm_ninputs (stmt);
if (ninputs > 0)
{
t = inputs = gimple_asm_input_op (stmt, 0);
for (i = 1; i < ninputs; i++)
t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
}
clobbers = NULL_TREE;
nclobbers = gimple_asm_nclobbers (stmt);
if (nclobbers > 0)
{
t = clobbers = gimple_asm_clobber_op (stmt, 0);
for (i = 1; i < nclobbers; i++)
t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
}
labels = NULL_TREE;
nlabels = gimple_asm_nlabels (stmt);
if (nlabels > 0)
{
edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
if (fallthru)
fallthru_bb = fallthru->dest;
t = labels = gimple_asm_label_op (stmt, 0);
for (i = 1; i < nlabels; i++)
t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
}
class save_input_location
{
const char *s = gimple_asm_string (stmt);
string = build_string (strlen (s), s);
}
location_t old;
public:
explicit save_input_location(location_t where)
{
old = input_location;
input_location = where;
}
~save_input_location()
{
input_location = old;
}
};
location_t locus = gimple_location (stmt);
if (gimple_asm_input_p (stmt))
{
const char *s = gimple_asm_string (stmt);
tree string = build_string (strlen (s), s);
expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
return;
}
/* Record the contents of OUTPUTS before it is modified. */
tree *orig_outputs = XALLOCAVEC (tree, noutputs);
/* There are some legacy diagnostics in here, and this also avoids a
sixth parameter to targetm.md_asm_adjust. */
save_input_location s_i_l(locus);
unsigned noutputs = gimple_asm_noutputs (stmt);
unsigned ninputs = gimple_asm_ninputs (stmt);
unsigned nlabels = gimple_asm_nlabels (stmt);
unsigned i;
/* ??? Diagnose during gimplification? */
if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
{
error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
return;
}
auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
/* Copy the gimple vectors into new vectors that we can manipulate. */
output_tvec.safe_grow (noutputs);
input_tvec.safe_grow (ninputs);
constraints.safe_grow (noutputs + ninputs);
for (i = 0; i < noutputs; ++i)
orig_outputs[i] = TREE_VALUE (gimple_asm_output_op (stmt, i));
{
tree t = gimple_asm_output_op (stmt, i);
output_tvec[i] = TREE_VALUE (t);
constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
}
for (i = 0; i < ninputs; i++)
{
tree t = gimple_asm_input_op (stmt, i);
input_tvec[i] = TREE_VALUE (t);
constraints[i + noutputs]
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
}
rtvec argvec, constraintvec, labelvec;
rtx body;
int ninout;
HARD_REG_SET clobbered_regs;
int clobber_conflict_found = 0;
/* Vector of RTX's of evaluated output operands. */
rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
int *inout_opnum = XALLOCAVEC (int, noutputs);
rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
machine_mode *inout_mode = XALLOCAVEC (machine_mode, noutputs);
const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
int old_generating_concat_p = generating_concat_p;
rtx_code_label *fallthru_label = NULL;
if (! check_operand_nalternatives (outputs, inputs))
/* ??? Diagnose during gimplification? */
if (! check_operand_nalternatives (constraints))
return;
/* Collect constraints. */
i = 0;
for (t = outputs; t ; t = TREE_CHAIN (t), i++)
constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
for (t = inputs; t ; t = TREE_CHAIN (t), i++)
constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
/* Sometimes we wish to automatically clobber registers across an asm.
Case in point is when the i386 backend moved from cc0 to a hard reg --
maintaining source-level compatibility means automatically clobbering
the flags register. */
clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
/* Count the number of meaningful clobbered registers, ignoring what
we would ignore later. */
nclobbers = 0;
auto_vec<rtx> clobber_rvec;
HARD_REG_SET clobbered_regs;
CLEAR_HARD_REG_SET (clobbered_regs);
for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
if (unsigned n = gimple_asm_nclobbers (stmt))
{
const char *regname;
int nregs;
clobber_rvec.reserve (n);
for (i = 0; i < n; i++)
{
tree t = gimple_asm_clobber_op (stmt, i);
const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
int nregs, j;
if (TREE_VALUE (tail) == error_mark_node)
return;
regname = TREE_STRING_POINTER (TREE_VALUE (tail));
i = decode_reg_name_and_count (regname, &nregs);
if (i == -4)
++nclobbers;
else if (i == -2)
error ("unknown register name %qs in %<asm%>", regname);
/* Mark clobbered registers. */
if (i >= 0)
{
int reg;
for (reg = i; reg < i + nregs; reg++)
j = decode_reg_name_and_count (regname, &nregs);
if (j < 0)
{
++nclobbers;
/* Clobbering the PIC register is an error. */
if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
if (j == -2)
{
error ("PIC register clobbered by %qs in %<asm%>", regname);
return;
/* ??? Diagnose during gimplification? */
error ("unknown register name %qs in %<asm%>", regname);
}
else if (j == -4)
{
rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
clobber_rvec.safe_push (x);
}
else
{
/* Otherwise we should have -1 == empty string
or -3 == cc, which is not a register. */
gcc_assert (j == -1 || j == -3);
}
SET_HARD_REG_BIT (clobbered_regs, reg);
}
else
for (int reg = j; reg < j + nregs; reg++)
{
/* Clobbering the PIC register is an error. */
if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
{
/* ??? Diagnose during gimplification? */
error ("PIC register clobbered by %qs in %<asm%>",
regname);
return;
}
SET_HARD_REG_BIT (clobbered_regs, reg);
rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
clobber_rvec.safe_push (x);
}
}
}
unsigned nclobbers = clobber_rvec.length();
/* First pass over inputs and outputs checks validity and sets
mark_addressable if needed. */
/* ??? Diagnose during gimplification? */
ninout = 0;
for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
for (i = 0; i < noutputs; ++i)
{
tree val = TREE_VALUE (tail);
tree val = output_tvec[i];
tree type = TREE_TYPE (val);
const char *constraint;
bool is_inout;
bool allows_reg;
bool allows_mem;
/* If there's an erroneous arg, emit no insn. */
if (type == error_mark_node)
return;
/* Try to parse the output constraint. If that fails, there's
no point in going further. */
constraint = constraints[i];
@ -2688,35 +2663,21 @@ expand_asm_stmt (gasm *stmt)
&& REG_P (DECL_RTL (val))
&& GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
mark_addressable (val);
if (is_inout)
ninout++;
}
ninputs += ninout;
if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
{
error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
return;
}
for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
for (i = 0; i < ninputs; ++i)
{
bool allows_reg, allows_mem;
const char *constraint;
/* If there's an erroneous arg, emit no insn, because the ASM_INPUT
would get VOIDmode and that could cause a crash in reload. */
if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
return;
constraint = constraints[i + noutputs];
if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
constraints, &allows_mem, &allows_reg))
if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
constraints.address (),
&allows_mem, &allows_reg))
return;
if (! allows_reg && allows_mem)
mark_addressable (TREE_VALUE (tail));
mark_addressable (input_tvec[i]);
}
/* Second pass evaluates arguments. */
@ -2724,17 +2685,21 @@ expand_asm_stmt (gasm *stmt)
/* Make sure stack is consistent for asm goto. */
if (nlabels > 0)
do_pending_stack_adjust ();
int old_generating_concat_p = generating_concat_p;
ninout = 0;
for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
/* Vector of RTX's of evaluated output operands. */
auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
output_rvec.safe_grow (noutputs);
for (i = 0; i < noutputs; ++i)
{
tree val = TREE_VALUE (tail);
tree val = output_tvec[i];
tree type = TREE_TYPE (val);
bool is_inout;
bool allows_reg;
bool allows_mem;
bool is_inout, allows_reg, allows_mem, ok;
rtx op;
bool ok;
ok = parse_output_constraint (&constraints[i], i, ninputs,
noutputs, &allows_mem, &allows_reg,
@ -2743,12 +2708,11 @@ expand_asm_stmt (gasm *stmt)
/* If an output operand is not a decl or indirect ref and our constraint
allows a register, make a temporary to act as an intermediate.
Make the asm insn write into that, then our caller will copy it to
Make the asm insn write into that, then we will copy it to
the real output operand. Likewise for promoted variables. */
generating_concat_p = 0;
real_output_rtx[i] = NULL_RTX;
if ((TREE_CODE (val) == INDIRECT_REF
&& allows_mem)
|| (DECL_P (val)
@ -2768,69 +2732,64 @@ expand_asm_stmt (gasm *stmt)
if ((! allows_mem && MEM_P (op))
|| GET_CODE (op) == CONCAT)
{
real_output_rtx[i] = op;
rtx old_op = op;
op = gen_reg_rtx (GET_MODE (op));
generating_concat_p = old_generating_concat_p;
if (is_inout)
emit_move_insn (op, real_output_rtx[i]);
emit_move_insn (op, old_op);
push_to_sequence2 (after_rtl_seq, after_rtl_end);
emit_move_insn (old_op, op);
after_rtl_seq = get_insns ();
after_rtl_end = get_last_insn ();
end_sequence ();
}
}
else
{
op = assign_temp (type, 0, 1);
op = validize_mem (op);
if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
TREE_VALUE (tail) = make_tree (type, op);
}
output_rtx[i] = op;
if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
generating_concat_p = old_generating_concat_p;
generating_concat_p = old_generating_concat_p;
push_to_sequence2 (after_rtl_seq, after_rtl_end);
expand_assignment (val, make_tree (type, op), false);
after_rtl_seq = get_insns ();
after_rtl_end = get_last_insn ();
end_sequence ();
}
output_rvec[i] = op;
if (is_inout)
{
inout_mode[ninout] = TYPE_MODE (type);
inout_opnum[ninout++] = i;
}
if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
clobber_conflict_found = 1;
inout_opnum.safe_push (i);
}
/* Make vectors for the expression-rtx, constraint strings,
and named operands. */
auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
argvec = rtvec_alloc (ninputs);
constraintvec = rtvec_alloc (ninputs);
labelvec = rtvec_alloc (nlabels);
input_rvec.safe_grow (ninputs);
input_mode.safe_grow (ninputs);
body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
: GET_MODE (output_rtx[0])),
ggc_strdup (TREE_STRING_POINTER (string)),
empty_string, 0, argvec, constraintvec,
labelvec, locus);
generating_concat_p = 0;
MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
/* Eval the inputs and put them into ARGVEC.
Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
for (i = 0; i < ninputs; ++i)
{
bool allows_reg, allows_mem;
tree val = input_tvec[i];
tree type = TREE_TYPE (val);
bool allows_reg, allows_mem, ok;
const char *constraint;
tree val, type;
rtx op;
bool ok;
constraint = constraints[i + noutputs];
ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
constraints, &allows_mem, &allows_reg);
ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
constraints.address (),
&allows_mem, &allows_reg);
gcc_assert (ok);
generating_concat_p = 0;
val = TREE_VALUE (tail);
type = TREE_TYPE (val);
/* EXPAND_INITIALIZER will not generate code for valid initializer
constants, but will still generate code for other types of operand.
This is the behavior we want for constant constraints. */
@ -2861,60 +2820,108 @@ expand_asm_stmt (gasm *stmt)
else
gcc_unreachable ();
}
generating_concat_p = old_generating_concat_p;
ASM_OPERANDS_INPUT (body, i) = op;
ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
= gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
ggc_strdup (constraints[i + noutputs]),
locus);
if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
clobber_conflict_found = 1;
input_rvec[i] = op;
input_mode[i] = TYPE_MODE (type);
}
/* Protect all the operands from the queue now that they have all been
evaluated. */
generating_concat_p = 0;
/* For in-out operands, copy output rtx to input rtx. */
unsigned ninout = inout_opnum.length();
for (i = 0; i < ninout; i++)
{
int j = inout_opnum[i];
rtx o = output_rvec[j];
input_rvec.safe_push (o);
input_mode.safe_push (GET_MODE (o));
char buffer[16];
ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
= output_rtx[j];
sprintf (buffer, "%d", j);
ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
= gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
constraints.safe_push (ggc_strdup (buffer));
}
ninputs += ninout;
/* Sometimes we wish to automatically clobber registers across an asm.
Case in point is when the i386 backend moved from cc0 to a hard reg --
maintaining source-level compatibility means automatically clobbering
the flags register. */
rtx_insn *after_md_seq = NULL;
if (targetm.md_asm_adjust)
after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
constraints, clobber_rvec,
clobbered_regs);
/* Do not allow the hook to change the output and input count,
lest it mess up the operand numbering. */
gcc_assert (output_rvec.length() == noutputs);
gcc_assert (input_rvec.length() == ninputs);
gcc_assert (constraints.length() == noutputs + ninputs);
/* But it certainly can adjust the clobbers. */
nclobbers = clobber_rvec.length();
/* Third pass checks for easy conflicts. */
/* ??? Why are we doing this on trees instead of rtx. */
bool clobber_conflict_found = 0;
for (i = 0; i < noutputs; ++i)
if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
clobber_conflict_found = 1;
for (i = 0; i < ninputs - ninout; ++i)
if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
clobber_conflict_found = 1;
/* Make vectors for the expression-rtx, constraint strings,
and named operands. */
rtvec argvec = rtvec_alloc (ninputs);
rtvec constraintvec = rtvec_alloc (ninputs);
rtvec labelvec = rtvec_alloc (nlabels);
rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
: GET_MODE (output_rvec[0])),
ggc_strdup (gimple_asm_string (stmt)),
empty_string, 0, argvec, constraintvec,
labelvec, locus);
MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
for (i = 0; i < ninputs; ++i)
{
ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
= gen_rtx_ASM_INPUT_loc (input_mode[i],
constraints[i + noutputs],
locus);
}
/* Copy labels to the vector. */
for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
rtx_code_label *fallthru_label = NULL;
if (nlabels > 0)
{
rtx r;
/* If asm goto has any labels in the fallthru basic block, use
a label that we emit immediately after the asm goto. Expansion
may insert further instructions into the same basic block after
asm goto and if we don't do this, insertion of instructions on
the fallthru edge might misbehave. See PR58670. */
if (fallthru_bb
&& label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
{
if (fallthru_label == NULL_RTX)
fallthru_label = gen_label_rtx ();
r = fallthru_label;
}
else
r = label_rtx (TREE_VALUE (tail));
ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
}
basic_block fallthru_bb = NULL;
edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
if (fallthru)
fallthru_bb = fallthru->dest;
generating_concat_p = old_generating_concat_p;
for (i = 0; i < nlabels; ++i)
{
tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
rtx r;
/* If asm goto has any labels in the fallthru basic block, use
a label that we emit immediately after the asm goto. Expansion
may insert further instructions into the same basic block after
asm goto and if we don't do this, insertion of instructions on
the fallthru edge might misbehave. See PR58670. */
if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
{
if (fallthru_label == NULL_RTX)
fallthru_label = gen_label_rtx ();
r = fallthru_label;
}
else
r = label_rtx (label);
ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
}
}
/* Now, for each output, construct an rtx
(set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
@ -2933,8 +2940,8 @@ expand_asm_stmt (gasm *stmt)
}
else if (noutputs == 1 && nclobbers == 0)
{
ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
emit_insn (gen_rtx_SET (output_rtx[0], body));
ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
emit_insn (gen_rtx_SET (output_rvec[0], body));
}
else
{
@ -2947,87 +2954,52 @@ expand_asm_stmt (gasm *stmt)
body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
/* For each output operand, store a SET. */
for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
for (i = 0; i < noutputs; ++i)
{
XVECEXP (body, 0, i)
= gen_rtx_SET (output_rtx[i],
gen_rtx_ASM_OPERANDS
(GET_MODE (output_rtx[i]),
ggc_strdup (TREE_STRING_POINTER (string)),
ggc_strdup (constraints[i]),
i, argvec, constraintvec, labelvec, locus));
MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i)))
= gimple_asm_volatile_p (stmt);
rtx src, o = output_rvec[i];
if (i == 0)
{
ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
src = obody;
}
else
{
src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
ASM_OPERANDS_TEMPLATE (obody),
constraints[i], i, argvec,
constraintvec, labelvec, locus);
MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
}
XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
}
/* If there are no outputs (but there are some clobbers)
store the bare ASM_OPERANDS into the PARALLEL. */
if (i == 0)
XVECEXP (body, 0, i++) = obody;
/* Store (clobber REG) for each clobbered register specified. */
for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
for (unsigned j = 0; j < nclobbers; ++j)
{
const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
int reg, nregs;
int j = decode_reg_name_and_count (regname, &nregs);
rtx clobbered_reg;
rtx clobbered_reg = clobber_rvec[j];
if (j < 0)
/* Do sanity check for overlap between clobbers and respectively
input and outputs that hasn't been handled. Such overlap
should have been detected and reported above. */
if (!clobber_conflict_found && REG_P (clobbered_reg))
{
if (j == -3) /* `cc', which is not a register */
continue;
/* We test the old body (obody) contents to avoid
tripping over the under-construction body. */
for (unsigned k = 0; k < noutputs; ++k)
if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
internal_error ("asm clobber conflict with output operand");
if (j == -4) /* `memory', don't cache memory across asm */
{
XVECEXP (body, 0, i++)
= gen_rtx_CLOBBER (VOIDmode,
gen_rtx_MEM
(BLKmode,
gen_rtx_SCRATCH (VOIDmode)));
continue;
}
/* Ignore unknown register, error already signaled. */
continue;
for (unsigned k = 0; k < ninputs - ninout; ++k)
if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
internal_error ("asm clobber conflict with input operand");
}
for (reg = j; reg < j + nregs; reg++)
{
/* Use QImode since that's guaranteed to clobber just
* one reg. */
clobbered_reg = gen_rtx_REG (QImode, reg);
/* Do sanity check for overlap between clobbers and
respectively input and outputs that hasn't been
handled. Such overlap should have been detected and
reported above. */
if (!clobber_conflict_found)
{
int opno;
/* We test the old body (obody) contents to avoid
tripping over the under-construction body. */
for (opno = 0; opno < noutputs; opno++)
if (reg_overlap_mentioned_p (clobbered_reg,
output_rtx[opno]))
internal_error
("asm clobber conflict with output operand");
for (opno = 0; opno < ninputs - ninout; opno++)
if (reg_overlap_mentioned_p (clobbered_reg,
ASM_OPERANDS_INPUT (obody,
opno)))
internal_error
("asm clobber conflict with input operand");
}
XVECEXP (body, 0, i++)
= gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
}
XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
}
if (nlabels > 0)
@ -3036,31 +3008,18 @@ expand_asm_stmt (gasm *stmt)
emit_insn (body);
}
generating_concat_p = old_generating_concat_p;
if (fallthru_label)
emit_label (fallthru_label);
/* For any outputs that needed reloading into registers, spill them
back to where they belong. */
for (i = 0; i < noutputs; ++i)
if (real_output_rtx[i])
emit_move_insn (real_output_rtx[i], output_rtx[i]);
if (after_md_seq)
emit_insn (after_md_seq);
if (after_rtl_seq)
emit_insn (after_rtl_seq);
/* Copy all the intermediate outputs into the specified outputs. */
for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
{
if (orig_outputs[i] != TREE_VALUE (tail))
{
expand_assignment (orig_outputs[i], TREE_VALUE (tail), false);
free_temp_slots ();
/* Restore the original value so that it's correct the next
time we expand this function. */
TREE_VALUE (tail) = orig_outputs[i];
}
}
crtl->has_asm_statement = 1;
free_temp_slots ();
crtl->has_asm_statement = 1;
}
/* Emit code to jump to the address

gcc/config/cris/cris.c

@ -179,7 +179,9 @@ static rtx cris_function_incoming_arg (cumulative_args_t,
machine_mode, const_tree, bool);
static void cris_function_arg_advance (cumulative_args_t, machine_mode,
const_tree, bool);
static tree cris_md_asm_clobbers (tree, tree, tree);
static rtx_insn *cris_md_asm_adjust (vec<rtx> &, vec<rtx> &,
vec<const char *> &,
vec<rtx> &, HARD_REG_SET &);
static bool cris_cannot_force_const_mem (machine_mode, rtx);
static void cris_option_override (void);
@ -283,8 +285,8 @@ int cris_cpu_version = CRIS_DEFAULT_CPU_VERSION;
#define TARGET_FUNCTION_INCOMING_ARG cris_function_incoming_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE cris_function_arg_advance
#undef TARGET_MD_ASM_CLOBBERS
#define TARGET_MD_ASM_CLOBBERS cris_md_asm_clobbers
#undef TARGET_MD_ASM_ADJUST
#define TARGET_MD_ASM_ADJUST cris_md_asm_adjust
#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM cris_cannot_force_const_mem
@ -4189,55 +4191,41 @@ cris_function_arg_advance (cumulative_args_t ca_v, machine_mode mode,
ca->regs += (3 + CRIS_FUNCTION_ARG_SIZE (mode, type)) / 4;
}
/* Worker function for TARGET_MD_ASM_CLOBBERS. */
/* Worker function for TARGET_MD_ASM_ADJUST. */
static tree
cris_md_asm_clobbers (tree outputs, tree inputs, tree in_clobbers)
static rtx_insn *
cris_md_asm_adjust (vec<rtx> &outputs, vec<rtx> &inputs,
vec<const char *> &constraints,
vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs)
{
HARD_REG_SET mof_set;
tree clobbers;
tree t;
/* For the time being, all asms clobber condition codes.
Revisit when there's a reasonable use for inputs/outputs
that mention condition codes. */
clobbers.safe_push (gen_rtx_REG (CCmode, CRIS_CC0_REGNUM));
SET_HARD_REG_BIT (clobbered_regs, CRIS_CC0_REGNUM);
CLEAR_HARD_REG_SET (mof_set);
SET_HARD_REG_BIT (mof_set, CRIS_MOF_REGNUM);
/* Determine if the source using MOF. If it is, automatically
clobbering MOF would cause it to have impossible constraints. */
/* For the time being, all asms clobber condition codes. Revisit when
there's a reasonable use for inputs/outputs that mention condition
codes. */
clobbers
= tree_cons (NULL_TREE,
build_string (strlen (reg_names[CRIS_CC0_REGNUM]),
reg_names[CRIS_CC0_REGNUM]),
in_clobbers);
/* Look for a use of the MOF constraint letter: h. */
for (unsigned i = 0, n = constraints.length(); i < n; ++i)
if (strchr (constraints[i], 'h') != NULL)
return NULL;
for (t = outputs; t != NULL; t = TREE_CHAIN (t))
{
tree val = TREE_VALUE (t);
/* Look for an output or an input that touches MOF. */
rtx mof_reg = gen_rtx_REG (SImode, CRIS_MOF_REGNUM);
for (unsigned i = 0, n = outputs.length(); i < n; ++i)
if (reg_overlap_mentioned_p (mof_reg, outputs[i]))
return NULL;
for (unsigned i = 0, n = inputs.length(); i < n; ++i)
if (reg_overlap_mentioned_p (mof_reg, inputs[i]))
return NULL;
/* The constraint letter for the singleton register class of MOF
is 'h'. If it's mentioned in the constraints, the asm is
MOF-aware and adding it to the clobbers would cause it to have
impossible constraints. */
if (strchr (TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t))),
'h') != NULL
|| tree_overlaps_hard_reg_set (val, &mof_set) != NULL_TREE)
return clobbers;
}
for (t = inputs; t != NULL; t = TREE_CHAIN (t))
{
tree val = TREE_VALUE (t);
if (strchr (TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t))),
'h') != NULL
|| tree_overlaps_hard_reg_set (val, &mof_set) != NULL_TREE)
return clobbers;
}
return tree_cons (NULL_TREE,
build_string (strlen (reg_names[CRIS_MOF_REGNUM]),
reg_names[CRIS_MOF_REGNUM]),
clobbers);
/* No direct reference to MOF or its constraint.
Clobber it for backward compatibility. */
clobbers.safe_push (mof_reg);
SET_HARD_REG_BIT (clobbered_regs, CRIS_MOF_REGNUM);
return NULL;
}
/* Implement TARGET_FRAME_POINTER_REQUIRED.

gcc/config/i386/i386.c

@ -45403,19 +45403,23 @@ ix86_c_mode_for_suffix (char suffix)
return VOIDmode;
}
/* Worker function for TARGET_MD_ASM_CLOBBERS.
/* Worker function for TARGET_MD_ASM_ADJUST.
We do this in the new i386 backend to maintain source compatibility
with the old cc0-based compiler. */
static tree
ix86_md_asm_clobbers (tree, tree, tree clobbers)
static rtx_insn *
ix86_md_asm_adjust (vec<rtx> &/*outputs*/, vec<rtx> &/*inputs*/,
vec<const char *> &/*constraints*/,
vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs)
{
clobbers = tree_cons (NULL_TREE, build_string (5, "flags"),
clobbers);
clobbers = tree_cons (NULL_TREE, build_string (4, "fpsr"),
clobbers);
return clobbers;
clobbers.safe_push (gen_rtx_REG (CCmode, FLAGS_REG));
clobbers.safe_push (gen_rtx_REG (CCFPmode, FPSR_REG));
SET_HARD_REG_BIT (clobbered_regs, FLAGS_REG);
SET_HARD_REG_BIT (clobbered_regs, FPSR_REG);
return NULL;
}
/* Implements target vector targetm.asm.encode_section_info. */
@ -51943,8 +51947,8 @@ ix86_operands_ok_for_move_multiple (rtx *operands, bool load,
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START ix86_va_start
#undef TARGET_MD_ASM_CLOBBERS
#define TARGET_MD_ASM_CLOBBERS ix86_md_asm_clobbers
#undef TARGET_MD_ASM_ADJUST
#define TARGET_MD_ASM_ADJUST ix86_md_asm_adjust
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

gcc/config/mn10300/mn10300.c

@ -2881,18 +2881,18 @@ mn10300_conditional_register_usage (void)
call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
}
/* Worker function for TARGET_MD_ASM_CLOBBERS.
/* Worker function for TARGET_MD_ASM_ADJUST.
We do this in the mn10300 backend to maintain source compatibility
with the old cc0-based compiler. */
static tree
mn10300_md_asm_clobbers (tree outputs ATTRIBUTE_UNUSED,
tree inputs ATTRIBUTE_UNUSED,
tree clobbers)
static rtx_insn *
mn10300_md_asm_adjust (vec<rtx> &/*outputs*/, vec<rtx> &/*inputs*/,
vec<const char *> &/*constraints*/,
vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs)
{
clobbers = tree_cons (NULL_TREE, build_string (5, "EPSW"),
clobbers);
return clobbers;
clobbers.safe_push (gen_rtx_REG (CCmode, CC_REG));
SET_HARD_REG_BIT (clobbered_regs, CC_REG);
return NULL;
}
/* A helper function for splitting cbranch patterns after reload. */
@ -3442,8 +3442,8 @@ mn10300_reorg (void)
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE mn10300_conditional_register_usage
#undef TARGET_MD_ASM_CLOBBERS
#define TARGET_MD_ASM_CLOBBERS mn10300_md_asm_clobbers
#undef TARGET_MD_ASM_ADJUST
#define TARGET_MD_ASM_ADJUST mn10300_md_asm_adjust
#undef TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM CC_REG

gcc/config/rs6000/rs6000.c

@ -1597,8 +1597,8 @@ static const struct attribute_spec rs6000_attribute_table[] =
#undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
#define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rs6000_loop_align_max_skip
#undef TARGET_MD_ASM_CLOBBERS
#define TARGET_MD_ASM_CLOBBERS rs6000_md_asm_clobbers
#undef TARGET_MD_ASM_ADJUST
#define TARGET_MD_ASM_ADJUST rs6000_md_asm_adjust
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE rs6000_option_override
@ -3209,17 +3209,20 @@ rs6000_builtin_mask_calculate (void)
| ((TARGET_LONG_DOUBLE_128) ? RS6000_BTM_LDBL128 : 0));
}
/* Implement TARGET_MD_ASM_CLOBBERS. All asm statements are considered
/* Implement TARGET_MD_ASM_ADJUST. All asm statements are considered
to clobber the XER[CA] bit because clobbering that bit without telling
the compiler worked just fine with versions of GCC before GCC 5, and
breaking a lot of older code in ways that are hard to track down is
not such a great idea. */
static tree
rs6000_md_asm_clobbers (tree, tree, tree clobbers)
static rtx_insn *
rs6000_md_asm_adjust (vec<rtx> &/*outputs*/, vec<rtx> &/*inputs*/,
vec<const char *> &/*constraints*/,
vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs)
{
tree s = build_string (strlen (reg_names[CA_REGNO]), reg_names[CA_REGNO]);
return tree_cons (NULL_TREE, s, clobbers);
clobbers.safe_push (gen_rtx_REG (SImode, CA_REGNO));
SET_HARD_REG_BIT (clobbered_regs, CA_REGNO);
return NULL;
}
/* Override command line options. Mostly we process the processor type and

gcc/config/visium/visium.c

@ -172,7 +172,9 @@ static bool visium_frame_pointer_required (void);
static tree visium_build_builtin_va_list (void);
static tree visium_md_asm_clobbers (tree, tree, tree);
static rtx_insn *visium_md_asm_adjust (vec<rtx> &, vec<rtx> &,
vec<const char *> &,
vec<rtx> &, HARD_REG_SET &);
static bool visium_legitimate_constant_p (enum machine_mode, rtx);
@ -299,8 +301,8 @@ static unsigned int visium_reorg (void);
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT visium_trampoline_init
#undef TARGET_MD_ASM_CLOBBERS
#define TARGET_MD_ASM_CLOBBERS visium_md_asm_clobbers
#undef TARGET_MD_ASM_ADJUST
#define TARGET_MD_ASM_ADJUST visium_md_asm_adjust
#undef TARGET_FLAGS_REGNUM
#define TARGET_FLAGS_REGNUM FLAGS_REGNUM
@ -720,13 +722,14 @@ visium_conditional_register_usage (void)
an asm We do this for the FLAGS to maintain source compatibility with
the original cc0-based compiler. */
static tree
visium_md_asm_clobbers (tree outputs ATTRIBUTE_UNUSED,
tree inputs ATTRIBUTE_UNUSED,
tree clobbers)
static rtx_insn *
visium_md_asm_adjust (vec<rtx> &/*outputs*/, vec<rtx> &/*inputs*/,
vec<const char *> &/*constraints*/,
vec<rtx> &clobbers, HARD_REG_SET &clobbered_regs)
{
const char *flags = reg_names[FLAGS_REGNUM];
return tree_cons (NULL_TREE, build_string (strlen (flags), flags), clobbers);
clobbers.safe_push (gen_rtx_REG (CCmode, FLAGS_REGNUM));
SET_HARD_REG_BIT (clobbered_regs, FLAGS_REGNUM);
return NULL;
}
/* Return true if X is a legitimate constant for a MODE immediate operand.

gcc/config/vxworks.c

@ -21,13 +21,13 @@ along with GCC; see the file COPYING3. If not see
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "vec.h"
#include "target.h"
#include "diagnostic-core.h"
#include "output.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"

gcc/doc/tm.texi

@ -10835,15 +10835,15 @@ from shared libraries (DLLs).
You need not define this macro if it would always evaluate to zero.
@end defmac
@deftypefn {Target Hook} tree TARGET_MD_ASM_CLOBBERS (tree @var{outputs}, tree @var{inputs}, tree @var{clobbers})
This target hook should add to @var{clobbers} @code{STRING_CST} trees for
any hard regs the port wishes to automatically clobber for an asm.
It should return the result of the last @code{tree_cons} used to add a
clobber. The @var{outputs}, @var{inputs} and @var{clobber} lists are the
corresponding parameters to the asm and may be inspected to avoid
clobbering a register that is an input or output of the asm. You can use
@code{tree_overlaps_hard_reg_set}, declared in @file{tree.h}, to test
for overlap with regards to asm-declared registers.
@deftypefn {Target Hook} {rtx_insn *} TARGET_MD_ASM_ADJUST (vec<rtx>& @var{outputs}, vec<rtx>& @var{inputs}, vec<const char *>& @var{constraints}, vec<rtx>& @var{clobbers}, HARD_REG_SET& @var{clobbered_regs})
This target hook may add @dfn{clobbers} to @var{clobbers} and
@var{clobbered_regs} for any hard regs the port wishes to automatically
clobber for an asm. The @var{outputs} and @var{inputs} may be inspected
to avoid clobbering a register that is already used by the asm.
It may modify the @var{outputs}, @var{inputs}, and @var{constraints}
as necessary for other pre-processing. In this case the return value is
a sequence of insns to emit after the asm.
@end deftypefn
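
Every port converted in this commit uses only the clobber side of this contract (see the cris, i386, mn10300, rs6000 and visium hunks): push REG rtxes, set bits in @var{clobbered_regs}, return NULL. The operand-rewriting side, where the hook edits @var{outputs}, @var{inputs} and @var{constraints} and returns a fixup sequence, is only described above; the following hypothetical sketch shows what such a worker could look like. The function name and the choice to rewrite output 0 are illustrative only, and the code assumes GCC's internal headers.

/* Hypothetical TARGET_MD_ASM_ADJUST worker that rewrites an operand.
   expand_asm_stmt emits the returned sequence immediately after the
   asm insn.  */
static rtx_insn *
example_md_asm_adjust (vec<rtx> &outputs, vec<rtx> &/*inputs*/,
                       vec<const char *> &/*constraints*/,
                       vec<rtx> &/*clobbers*/, HARD_REG_SET &/*clobbered_regs*/)
{
  rtx_insn *seq = NULL;

  if (!outputs.is_empty () && REG_P (outputs[0]))
    {
      /* Redirect the first output into a fresh pseudo and queue a copy
         back to the original register after the asm.  */
      rtx orig = outputs[0];
      rtx tmp = gen_reg_rtx (GET_MODE (orig));
      outputs[0] = tmp;

      start_sequence ();
      emit_move_insn (orig, tmp);
      seq = get_insns ();
      end_sequence ();
    }
  return seq;
}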
@defmac MATH_LIBRARY

gcc/doc/tm.texi.in

@ -7903,7 +7903,7 @@ from shared libraries (DLLs).
You need not define this macro if it would always evaluate to zero.
@end defmac
@hook TARGET_MD_ASM_CLOBBERS
@hook TARGET_MD_ASM_ADJUST
@defmac MATH_LIBRARY
Define this macro as a C string constant for the linker argument to link

gcc/gimple.c

@ -23,10 +23,10 @@ along with GCC; see the file COPYING3. If not see
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "target.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"

gcc/hooks.c

@ -378,14 +378,6 @@ hook_uint_mode_0 (machine_mode m ATTRIBUTE_UNUSED)
return 0;
}
/* Generic hook that takes three trees and returns the last one as is. */
tree
hook_tree_tree_tree_tree_3rd_identity (tree a ATTRIBUTE_UNUSED,
tree b ATTRIBUTE_UNUSED, tree c)
{
return c;
}
/* Generic hook that takes no arguments and returns a NULL const string. */
const char *
hook_constcharptr_void_null (void)

gcc/hooks.h

@ -93,7 +93,6 @@ extern tree hook_tree_const_tree_null (const_tree);
extern tree hook_tree_tree_tree_null (tree, tree);
extern tree hook_tree_tree_tree_tree_null (tree, tree, tree);
extern tree hook_tree_tree_tree_tree_3rd_identity (tree, tree, tree);
extern tree hook_tree_tree_int_treep_bool_null (tree, int, tree *, bool);
extern unsigned hook_uint_void_0 (void);

gcc/incpath.c

@ -21,6 +21,7 @@
#include "system.h"
#include "coretypes.h"
#include "machmode.h"
#include "vec.h"
#include "target.h"
#include "tm.h"
#include "cpplib.h"

gcc/mode-switching.c

@ -21,6 +21,7 @@ along with GCC; see the file COPYING3. If not see
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "vec.h"
#include "target.h"
#include "rtl.h"
#include "regs.h"
@ -29,7 +30,6 @@ along with GCC; see the file COPYING3. If not see
#include "insn-config.h"
#include "recog.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"

gcc/system.h

@ -964,6 +964,7 @@ extern void fancy_abort (const char *, int, const char *) ATTRIBUTE_NORETURN;
TARGET_HANDLE_PRAGMA_EXTERN_PREFIX \
TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_EVEN \
TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD \
TARGET_MD_ASM_CLOBBERS
/* Arrays that were deleted in favor of a functional interface. */
#pragma GCC poison built_in_decls implicit_built_in_decls

gcc/target.def

@ -3742,20 +3742,22 @@ machines. One reason you may need to define this target hook is if\n\
rtx, (void),
default_builtin_setjmp_frame_value)
/* This target hook should add STRING_CST trees for any hard regs
the port wishes to automatically clobber for an asm. */
/* This target hook should manipulate the outputs, inputs, constraints,
and clobbers the port wishes for pre-processing the asm. */
DEFHOOK
(md_asm_clobbers,
"This target hook should add to @var{clobbers} @code{STRING_CST} trees for\n\
any hard regs the port wishes to automatically clobber for an asm.\n\
It should return the result of the last @code{tree_cons} used to add a\n\
clobber. The @var{outputs}, @var{inputs} and @var{clobber} lists are the\n\
corresponding parameters to the asm and may be inspected to avoid\n\
clobbering a register that is an input or output of the asm. You can use\n\
@code{tree_overlaps_hard_reg_set}, declared in @file{tree.h}, to test\n\
for overlap with regards to asm-declared registers.",
tree, (tree outputs, tree inputs, tree clobbers),
hook_tree_tree_tree_tree_3rd_identity)
(md_asm_adjust,
"This target hook may add @dfn{clobbers} to @var{clobbers} and\n\
@var{clobbered_regs} for any hard regs the port wishes to automatically\n\
clobber for an asm. The @var{outputs} and @var{inputs} may be inspected\n\
to avoid clobbering a register that is already used by the asm.\n\
\n\
It may modify the @var{outputs}, @var{inputs}, and @var{constraints}\n\
as necessary for other pre-processing. In this case the return value is\n\
a sequence of insns to emit after the asm.",
rtx_insn *,
(vec<rtx>& outputs, vec<rtx>& inputs, vec<const char *>& constraints,
vec<rtx>& clobbers, HARD_REG_SET& clobbered_regs),
NULL)
/* This target hook allows the backend to specify a calling convention
in the debug information. This function actually returns an