ia64.h (enum fetchop_code): Remove.

        * config/ia64/ia64.h (enum fetchop_code): Remove.
        (enum ia64_builtins): Move ...
        * config/ia64/ia64.c (enum ia64_builtins): ... here.  Remove all
        members except BSP and FLUSHRS.
        (ia64_init_builtins): Remove __sync builtins.
        (ia64_expand_builtin): Likewise.
        (ia64_expand_fetch_and_op, ia64_expand_op_and_fetch): Remove.
        (ia64_expand_compare_and_swap, ia64_expand_lock_test_and_set): Remove.
        (ia64_expand_lock_release): Remove.
        * config/ia64/ia64.md (mf): Move to sync.md.
        (mf_internal, fetchadd_acq_si, fetchadd_acq_di, cmpxchg_acq_si,
        cmpxchg_acq_di, xchgsi, xchgdi): Likewise.
        * config/ia64/sync.md: New file.
        (memory_barrier): Rename from mf.
        (fetchadd_acq_<I48MODE>): Macroize from _si/_di patterns.
        (cmpxchg_acq_<I48MODE>): Likewise.
        (sync_lock_test_and_set<I48MODE>): Likewise.

        * config/ia64/ia64intrin.h: Define nothing for C; limit #defines
        to C++.  Remove __sync* declarations.  s/_si/_4/.  s/_di/_8/.

From-SVN: r98156
Richard Henderson 2005-04-14 16:53:02 -07:00 committed by Richard Henderson
parent 1ef45b7773
commit af795c3cac
6 changed files with 187 additions and 727 deletions
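
A minimal C sketch of what the ia64intrin.h change looks like from user code
(illustrative only, not part of the commit; the variable names are made up).
In C the generic names now refer directly to the compiler's builtins; in C++
the header's #defines dispatch on sizeof to the size-suffixed _4/_8 forms:

    int  counter;
    long big_counter;

    int  old32 = __sync_fetch_and_add (&counter, 1);      /* _4 form */
    long old64 = __sync_fetch_and_add (&big_counter, 1);  /* _8 form */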

gcc/ChangeLog

@@ -1,3 +1,26 @@
2005-04-14  Richard Henderson  <rth@redhat.com>
* config/ia64/ia64.h (enum fetchop_code): Remove.
(enum ia64_builtins): Move ...
* config/ia64/ia64.c (enum ia64_builtins): ... here. Remove all
members except BSP and FLUSHRS.
(ia64_init_builtins): Remove __sync builtins.
(ia64_expand_builtin): Likewise.
(ia64_expand_fetch_and_op, ia64_expand_op_and_fetch): Remove.
(ia64_expand_compare_and_swap, ia64_expand_lock_test_and_set): Remove.
(ia64_expand_lock_release): Remove.
* config/ia64/ia64.md (mf): Move to sync.md.
(mf_internal, fetchadd_acq_si, fetchadd_acq_di, cmpxchg_acq_si,
cmpxchg_acq_di, xchgsi, xchgdi): Likewise.
* config/ia64/sync.md: New file.
(memory_barrier): Rename from mf.
(fetchadd_acq_<I48MODE>): Macroize from _si/_di patterns.
(cmpxchg_acq_<I48MODE>): Likewise.
(sync_lock_test_and_set<I48MODE>): Likewise.
* config/ia64/ia64intrin.h: Define nothing for C; limit #defines
to C++. Remove __sync* declarations. s/_si/_4/. s/_di/_8/.
2005-04-14  Richard Henderson  <rth@redhat.com>
* config/i386/i386.c (x86_cmpxchg, x86_xadd): New.

gcc/config/ia64/ia64.c

@@ -199,12 +199,6 @@ static bool ia64_in_small_data_p (tree);
static void process_epilogue (void);
static int process_set (FILE *, rtx);
static rtx ia64_expand_fetch_and_op (optab, enum machine_mode, tree, rtx);
static rtx ia64_expand_op_and_fetch (optab, enum machine_mode, tree, rtx);
static rtx ia64_expand_compare_and_swap (enum machine_mode, enum machine_mode,
int, tree, rtx);
static rtx ia64_expand_lock_test_and_set (enum machine_mode, tree, rtx);
static rtx ia64_expand_lock_release (enum machine_mode, tree, rtx);
static bool ia64_assemble_integer (rtx, unsigned int, int);
static void ia64_output_function_prologue (FILE *, HOST_WIDE_INT);
static void ia64_output_function_epilogue (FILE *, HOST_WIDE_INT);
@@ -7854,51 +7848,15 @@ process_for_unwind_directive (FILE *asm_out_file, rtx insn)
}
enum ia64_builtins
{
IA64_BUILTIN_BSP,
IA64_BUILTIN_FLUSHRS
};
void
ia64_init_builtins (void)
{
tree psi_type_node = build_pointer_type (integer_type_node);
tree pdi_type_node = build_pointer_type (long_integer_type_node);
/* __sync_val_compare_and_swap_si, __sync_bool_compare_and_swap_si */
tree si_ftype_psi_si_si
= build_function_type_list (integer_type_node,
psi_type_node, integer_type_node,
integer_type_node, NULL_TREE);
/* __sync_val_compare_and_swap_di */
tree di_ftype_pdi_di_di
= build_function_type_list (long_integer_type_node,
pdi_type_node, long_integer_type_node,
long_integer_type_node, NULL_TREE);
/* __sync_bool_compare_and_swap_di */
tree si_ftype_pdi_di_di
= build_function_type_list (integer_type_node,
pdi_type_node, long_integer_type_node,
long_integer_type_node, NULL_TREE);
/* __sync_synchronize */
tree void_ftype_void
= build_function_type (void_type_node, void_list_node);
/* __sync_lock_test_and_set_si */
tree si_ftype_psi_si
= build_function_type_list (integer_type_node,
psi_type_node, integer_type_node, NULL_TREE);
/* __sync_lock_test_and_set_di */
tree di_ftype_pdi_di
= build_function_type_list (long_integer_type_node,
pdi_type_node, long_integer_type_node,
NULL_TREE);
/* __sync_lock_release_si */
tree void_ftype_psi
= build_function_type_list (void_type_node, psi_type_node, NULL_TREE);
/* __sync_lock_release_di */
tree void_ftype_pdi
= build_function_type_list (void_type_node, pdi_type_node, NULL_TREE);
tree fpreg_type;
tree float80_type;
@@ -7933,27 +7891,6 @@ ia64_init_builtins (void)
lang_hooks.builtin_function ((name), (type), (code), BUILT_IN_MD, \
NULL, NULL_TREE)
def_builtin ("__sync_val_compare_and_swap_si", si_ftype_psi_si_si,
IA64_BUILTIN_VAL_COMPARE_AND_SWAP_SI);
def_builtin ("__sync_val_compare_and_swap_di", di_ftype_pdi_di_di,
IA64_BUILTIN_VAL_COMPARE_AND_SWAP_DI);
def_builtin ("__sync_bool_compare_and_swap_si", si_ftype_psi_si_si,
IA64_BUILTIN_BOOL_COMPARE_AND_SWAP_SI);
def_builtin ("__sync_bool_compare_and_swap_di", si_ftype_pdi_di_di,
IA64_BUILTIN_BOOL_COMPARE_AND_SWAP_DI);
def_builtin ("__sync_synchronize", void_ftype_void,
IA64_BUILTIN_SYNCHRONIZE);
def_builtin ("__sync_lock_test_and_set_si", si_ftype_psi_si,
IA64_BUILTIN_LOCK_TEST_AND_SET_SI);
def_builtin ("__sync_lock_test_and_set_di", di_ftype_pdi_di,
IA64_BUILTIN_LOCK_TEST_AND_SET_DI);
def_builtin ("__sync_lock_release_si", void_ftype_psi,
IA64_BUILTIN_LOCK_RELEASE_SI);
def_builtin ("__sync_lock_release_di", void_ftype_pdi,
IA64_BUILTIN_LOCK_RELEASE_DI);
def_builtin ("__builtin_ia64_bsp",
build_function_type (ptr_type_node, void_list_node),
IA64_BUILTIN_BSP);
@@ -7962,322 +7899,9 @@ ia64_init_builtins (void)
build_function_type (void_type_node, void_list_node),
IA64_BUILTIN_FLUSHRS);
def_builtin ("__sync_fetch_and_add_si", si_ftype_psi_si,
IA64_BUILTIN_FETCH_AND_ADD_SI);
def_builtin ("__sync_fetch_and_sub_si", si_ftype_psi_si,
IA64_BUILTIN_FETCH_AND_SUB_SI);
def_builtin ("__sync_fetch_and_or_si", si_ftype_psi_si,
IA64_BUILTIN_FETCH_AND_OR_SI);
def_builtin ("__sync_fetch_and_and_si", si_ftype_psi_si,
IA64_BUILTIN_FETCH_AND_AND_SI);
def_builtin ("__sync_fetch_and_xor_si", si_ftype_psi_si,
IA64_BUILTIN_FETCH_AND_XOR_SI);
def_builtin ("__sync_fetch_and_nand_si", si_ftype_psi_si,
IA64_BUILTIN_FETCH_AND_NAND_SI);
def_builtin ("__sync_add_and_fetch_si", si_ftype_psi_si,
IA64_BUILTIN_ADD_AND_FETCH_SI);
def_builtin ("__sync_sub_and_fetch_si", si_ftype_psi_si,
IA64_BUILTIN_SUB_AND_FETCH_SI);
def_builtin ("__sync_or_and_fetch_si", si_ftype_psi_si,
IA64_BUILTIN_OR_AND_FETCH_SI);
def_builtin ("__sync_and_and_fetch_si", si_ftype_psi_si,
IA64_BUILTIN_AND_AND_FETCH_SI);
def_builtin ("__sync_xor_and_fetch_si", si_ftype_psi_si,
IA64_BUILTIN_XOR_AND_FETCH_SI);
def_builtin ("__sync_nand_and_fetch_si", si_ftype_psi_si,
IA64_BUILTIN_NAND_AND_FETCH_SI);
def_builtin ("__sync_fetch_and_add_di", di_ftype_pdi_di,
IA64_BUILTIN_FETCH_AND_ADD_DI);
def_builtin ("__sync_fetch_and_sub_di", di_ftype_pdi_di,
IA64_BUILTIN_FETCH_AND_SUB_DI);
def_builtin ("__sync_fetch_and_or_di", di_ftype_pdi_di,
IA64_BUILTIN_FETCH_AND_OR_DI);
def_builtin ("__sync_fetch_and_and_di", di_ftype_pdi_di,
IA64_BUILTIN_FETCH_AND_AND_DI);
def_builtin ("__sync_fetch_and_xor_di", di_ftype_pdi_di,
IA64_BUILTIN_FETCH_AND_XOR_DI);
def_builtin ("__sync_fetch_and_nand_di", di_ftype_pdi_di,
IA64_BUILTIN_FETCH_AND_NAND_DI);
def_builtin ("__sync_add_and_fetch_di", di_ftype_pdi_di,
IA64_BUILTIN_ADD_AND_FETCH_DI);
def_builtin ("__sync_sub_and_fetch_di", di_ftype_pdi_di,
IA64_BUILTIN_SUB_AND_FETCH_DI);
def_builtin ("__sync_or_and_fetch_di", di_ftype_pdi_di,
IA64_BUILTIN_OR_AND_FETCH_DI);
def_builtin ("__sync_and_and_fetch_di", di_ftype_pdi_di,
IA64_BUILTIN_AND_AND_FETCH_DI);
def_builtin ("__sync_xor_and_fetch_di", di_ftype_pdi_di,
IA64_BUILTIN_XOR_AND_FETCH_DI);
def_builtin ("__sync_nand_and_fetch_di", di_ftype_pdi_di,
IA64_BUILTIN_NAND_AND_FETCH_DI);
#undef def_builtin
}
/* Expand fetch_and_op intrinsics. The basic code sequence is:
mf
tmp = [ptr];
do {
ret = tmp;
ar.ccv = tmp;
tmp <op>= value;
cmpxchgsz.acq tmp = [ptr], tmp
} while (tmp != ret)
*/
static rtx
ia64_expand_fetch_and_op (optab binoptab, enum machine_mode mode,
tree arglist, rtx target)
{
rtx ret, label, tmp, ccv, insn, mem, value;
tree arg0, arg1;
arg0 = TREE_VALUE (arglist);
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
mem = expand_expr (arg0, NULL_RTX, Pmode, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE(mem) != Pmode)
mem = convert_memory_address (Pmode, mem);
#endif
value = expand_expr (arg1, NULL_RTX, mode, 0);
mem = gen_rtx_MEM (mode, force_reg (Pmode, mem));
MEM_VOLATILE_P (mem) = 1;
if (target && register_operand (target, mode))
ret = target;
else
ret = gen_reg_rtx (mode);
emit_insn (gen_mf ());
/* Special case for fetchadd instructions. */
if (binoptab == add_optab && fetchadd_operand (value, VOIDmode))
{
if (mode == SImode)
insn = gen_fetchadd_acq_si (ret, mem, value);
else
insn = gen_fetchadd_acq_di (ret, mem, value);
emit_insn (insn);
return ret;
}
tmp = gen_reg_rtx (mode);
/* ar.ccv must always be loaded with a zero-extended DImode value. */
ccv = gen_rtx_REG (DImode, AR_CCV_REGNUM);
emit_move_insn (tmp, mem);
label = gen_label_rtx ();
emit_label (label);
emit_move_insn (ret, tmp);
convert_move (ccv, tmp, /*unsignedp=*/1);
/* Perform the specific operation. Special case NAND by noticing
one_cmpl_optab instead. */
if (binoptab == one_cmpl_optab)
{
tmp = expand_unop (mode, binoptab, tmp, NULL, OPTAB_WIDEN);
binoptab = and_optab;
}
tmp = expand_binop (mode, binoptab, tmp, value, tmp, 1, OPTAB_WIDEN);
if (mode == SImode)
insn = gen_cmpxchg_acq_si (tmp, mem, tmp, ccv);
else
insn = gen_cmpxchg_acq_di (tmp, mem, tmp, ccv);
emit_insn (insn);
emit_cmp_and_jump_insns (tmp, ret, NE, 0, mode, 1, label);
return ret;
}
/* Expand op_and_fetch intrinsics. The basic code sequence is:
mf
tmp = [ptr];
do {
old = tmp;
ar.ccv = tmp;
ret = tmp <op> value;
cmpxchgsz.acq tmp = [ptr], ret
} while (tmp != old)
*/
static rtx
ia64_expand_op_and_fetch (optab binoptab, enum machine_mode mode,
tree arglist, rtx target)
{
rtx old, label, tmp, ret, ccv, insn, mem, value;
tree arg0, arg1;
arg0 = TREE_VALUE (arglist);
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
mem = expand_expr (arg0, NULL_RTX, Pmode, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
if (GET_MODE(mem) != Pmode)
mem = convert_memory_address (Pmode, mem);
#endif
value = expand_expr (arg1, NULL_RTX, mode, 0);
mem = gen_rtx_MEM (mode, force_reg (Pmode, mem));
MEM_VOLATILE_P (mem) = 1;
if (target && ! register_operand (target, mode))
target = NULL_RTX;
emit_insn (gen_mf ());
tmp = gen_reg_rtx (mode);
old = gen_reg_rtx (mode);
/* ar.ccv must always be loaded with a zero-extended DImode value. */
ccv = gen_rtx_REG (DImode, AR_CCV_REGNUM);
emit_move_insn (tmp, mem);
label = gen_label_rtx ();
emit_label (label);
emit_move_insn (old, tmp);
convert_move (ccv, tmp, /*unsignedp=*/1);
/* Perform the specific operation. Special case NAND by noticing
one_cmpl_optab instead. */
if (binoptab == one_cmpl_optab)
{
tmp = expand_unop (mode, binoptab, tmp, NULL, OPTAB_WIDEN);
binoptab = and_optab;
}
ret = expand_binop (mode, binoptab, tmp, value, target, 1, OPTAB_WIDEN);
if (mode == SImode)
insn = gen_cmpxchg_acq_si (tmp, mem, ret, ccv);
else
insn = gen_cmpxchg_acq_di (tmp, mem, ret, ccv);
emit_insn (insn);
emit_cmp_and_jump_insns (tmp, old, NE, 0, mode, 1, label);
return ret;
}
/* Expand val_ and bool_compare_and_swap. For val_ we want:
ar.ccv = oldval
mf
cmpxchgsz.acq ret = [ptr], newval, ar.ccv
return ret
For bool_ it's the same except return ret == oldval.
*/
static rtx
ia64_expand_compare_and_swap (enum machine_mode rmode, enum machine_mode mode,
int boolp, tree arglist, rtx target)
{
tree arg0, arg1, arg2;
rtx mem, old, new, ccv, tmp, insn;
arg0 = TREE_VALUE (arglist);
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
mem = expand_expr (arg0, NULL_RTX, ptr_mode, 0);
old = expand_expr (arg1, NULL_RTX, mode, 0);
new = expand_expr (arg2, NULL_RTX, mode, 0);
mem = gen_rtx_MEM (mode, force_reg (ptr_mode, mem));
MEM_VOLATILE_P (mem) = 1;
if (GET_MODE (old) != mode)
old = convert_to_mode (mode, old, /*unsignedp=*/1);
if (GET_MODE (new) != mode)
new = convert_to_mode (mode, new, /*unsignedp=*/1);
if (! register_operand (old, mode))
old = copy_to_mode_reg (mode, old);
if (! register_operand (new, mode))
new = copy_to_mode_reg (mode, new);
if (! boolp && target && register_operand (target, mode))
tmp = target;
else
tmp = gen_reg_rtx (mode);
ccv = gen_rtx_REG (DImode, AR_CCV_REGNUM);
convert_move (ccv, old, /*unsignedp=*/1);
emit_insn (gen_mf ());
if (mode == SImode)
insn = gen_cmpxchg_acq_si (tmp, mem, new, ccv);
else
insn = gen_cmpxchg_acq_di (tmp, mem, new, ccv);
emit_insn (insn);
if (boolp)
{
if (! target)
target = gen_reg_rtx (rmode);
return emit_store_flag_force (target, EQ, tmp, old, mode, 1, 1);
}
else
return tmp;
}
/* Expand lock_test_and_set. I.e. `xchgsz ret = [ptr], new'. */
static rtx
ia64_expand_lock_test_and_set (enum machine_mode mode, tree arglist,
rtx target)
{
tree arg0, arg1;
rtx mem, new, ret, insn;
arg0 = TREE_VALUE (arglist);
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
mem = expand_expr (arg0, NULL_RTX, ptr_mode, 0);
new = expand_expr (arg1, NULL_RTX, mode, 0);
mem = gen_rtx_MEM (mode, force_reg (ptr_mode, mem));
MEM_VOLATILE_P (mem) = 1;
if (! register_operand (new, mode))
new = copy_to_mode_reg (mode, new);
if (target && register_operand (target, mode))
ret = target;
else
ret = gen_reg_rtx (mode);
if (mode == SImode)
insn = gen_xchgsi (ret, mem, new);
else
insn = gen_xchgdi (ret, mem, new);
emit_insn (insn);
return ret;
}
/* Expand lock_release. I.e. `stsz.rel [ptr] = r0'. */
static rtx
ia64_expand_lock_release (enum machine_mode mode, tree arglist,
rtx target ATTRIBUTE_UNUSED)
{
tree arg0;
rtx mem;
arg0 = TREE_VALUE (arglist);
mem = expand_expr (arg0, NULL_RTX, ptr_mode, 0);
mem = gen_rtx_MEM (mode, force_reg (ptr_mode, mem));
MEM_VOLATILE_P (mem) = 1;
emit_move_insn (mem, const0_rtx);
return const0_rtx;
}
rtx
ia64_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
enum machine_mode mode ATTRIBUTE_UNUSED,
@@ -8285,89 +7909,9 @@ ia64_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
{
tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
tree arglist = TREE_OPERAND (exp, 1);
enum machine_mode rmode = VOIDmode;
switch (fcode)
{
case IA64_BUILTIN_BOOL_COMPARE_AND_SWAP_SI:
case IA64_BUILTIN_VAL_COMPARE_AND_SWAP_SI:
mode = SImode;
rmode = SImode;
break;
case IA64_BUILTIN_LOCK_TEST_AND_SET_SI:
case IA64_BUILTIN_LOCK_RELEASE_SI:
case IA64_BUILTIN_FETCH_AND_ADD_SI:
case IA64_BUILTIN_FETCH_AND_SUB_SI:
case IA64_BUILTIN_FETCH_AND_OR_SI:
case IA64_BUILTIN_FETCH_AND_AND_SI:
case IA64_BUILTIN_FETCH_AND_XOR_SI:
case IA64_BUILTIN_FETCH_AND_NAND_SI:
case IA64_BUILTIN_ADD_AND_FETCH_SI:
case IA64_BUILTIN_SUB_AND_FETCH_SI:
case IA64_BUILTIN_OR_AND_FETCH_SI:
case IA64_BUILTIN_AND_AND_FETCH_SI:
case IA64_BUILTIN_XOR_AND_FETCH_SI:
case IA64_BUILTIN_NAND_AND_FETCH_SI:
mode = SImode;
break;
case IA64_BUILTIN_BOOL_COMPARE_AND_SWAP_DI:
mode = DImode;
rmode = SImode;
break;
case IA64_BUILTIN_VAL_COMPARE_AND_SWAP_DI:
mode = DImode;
rmode = DImode;
break;
case IA64_BUILTIN_LOCK_TEST_AND_SET_DI:
case IA64_BUILTIN_LOCK_RELEASE_DI:
case IA64_BUILTIN_FETCH_AND_ADD_DI:
case IA64_BUILTIN_FETCH_AND_SUB_DI:
case IA64_BUILTIN_FETCH_AND_OR_DI:
case IA64_BUILTIN_FETCH_AND_AND_DI:
case IA64_BUILTIN_FETCH_AND_XOR_DI:
case IA64_BUILTIN_FETCH_AND_NAND_DI:
case IA64_BUILTIN_ADD_AND_FETCH_DI:
case IA64_BUILTIN_SUB_AND_FETCH_DI:
case IA64_BUILTIN_OR_AND_FETCH_DI:
case IA64_BUILTIN_AND_AND_FETCH_DI:
case IA64_BUILTIN_XOR_AND_FETCH_DI:
case IA64_BUILTIN_NAND_AND_FETCH_DI:
mode = DImode;
break;
default:
break;
}
switch (fcode)
{
case IA64_BUILTIN_BOOL_COMPARE_AND_SWAP_SI:
case IA64_BUILTIN_BOOL_COMPARE_AND_SWAP_DI:
return ia64_expand_compare_and_swap (rmode, mode, 1, arglist,
target);
case IA64_BUILTIN_VAL_COMPARE_AND_SWAP_SI:
case IA64_BUILTIN_VAL_COMPARE_AND_SWAP_DI:
return ia64_expand_compare_and_swap (rmode, mode, 0, arglist,
target);
case IA64_BUILTIN_SYNCHRONIZE:
emit_insn (gen_mf ());
return const0_rtx;
case IA64_BUILTIN_LOCK_TEST_AND_SET_SI:
case IA64_BUILTIN_LOCK_TEST_AND_SET_DI:
return ia64_expand_lock_test_and_set (mode, arglist, target);
case IA64_BUILTIN_LOCK_RELEASE_SI:
case IA64_BUILTIN_LOCK_RELEASE_DI:
return ia64_expand_lock_release (mode, arglist, target);
case IA64_BUILTIN_BSP:
if (! target || ! register_operand (target, DImode))
target = gen_reg_rtx (DImode);
@@ -8381,54 +7925,6 @@ ia64_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
emit_insn (gen_flushrs ());
return const0_rtx;
case IA64_BUILTIN_FETCH_AND_ADD_SI:
case IA64_BUILTIN_FETCH_AND_ADD_DI:
return ia64_expand_fetch_and_op (add_optab, mode, arglist, target);
case IA64_BUILTIN_FETCH_AND_SUB_SI:
case IA64_BUILTIN_FETCH_AND_SUB_DI:
return ia64_expand_fetch_and_op (sub_optab, mode, arglist, target);
case IA64_BUILTIN_FETCH_AND_OR_SI:
case IA64_BUILTIN_FETCH_AND_OR_DI:
return ia64_expand_fetch_and_op (ior_optab, mode, arglist, target);
case IA64_BUILTIN_FETCH_AND_AND_SI:
case IA64_BUILTIN_FETCH_AND_AND_DI:
return ia64_expand_fetch_and_op (and_optab, mode, arglist, target);
case IA64_BUILTIN_FETCH_AND_XOR_SI:
case IA64_BUILTIN_FETCH_AND_XOR_DI:
return ia64_expand_fetch_and_op (xor_optab, mode, arglist, target);
case IA64_BUILTIN_FETCH_AND_NAND_SI:
case IA64_BUILTIN_FETCH_AND_NAND_DI:
return ia64_expand_fetch_and_op (one_cmpl_optab, mode, arglist, target);
case IA64_BUILTIN_ADD_AND_FETCH_SI:
case IA64_BUILTIN_ADD_AND_FETCH_DI:
return ia64_expand_op_and_fetch (add_optab, mode, arglist, target);
case IA64_BUILTIN_SUB_AND_FETCH_SI:
case IA64_BUILTIN_SUB_AND_FETCH_DI:
return ia64_expand_op_and_fetch (sub_optab, mode, arglist, target);
case IA64_BUILTIN_OR_AND_FETCH_SI:
case IA64_BUILTIN_OR_AND_FETCH_DI:
return ia64_expand_op_and_fetch (ior_optab, mode, arglist, target);
case IA64_BUILTIN_AND_AND_FETCH_SI:
case IA64_BUILTIN_AND_AND_FETCH_DI:
return ia64_expand_op_and_fetch (and_optab, mode, arglist, target);
case IA64_BUILTIN_XOR_AND_FETCH_SI:
case IA64_BUILTIN_XOR_AND_FETCH_DI:
return ia64_expand_op_and_fetch (xor_optab, mode, arglist, target);
case IA64_BUILTIN_NAND_AND_FETCH_SI:
case IA64_BUILTIN_NAND_AND_FETCH_DI:
return ia64_expand_op_and_fetch (one_cmpl_optab, mode, arglist, target);
default:
break;
}

gcc/config/ia64/ia64.h

@@ -2034,66 +2034,6 @@ struct machine_function GTY(())
int state_num;
};
enum ia64_builtins
{
IA64_BUILTIN_SYNCHRONIZE,
IA64_BUILTIN_FETCH_AND_ADD_SI,
IA64_BUILTIN_FETCH_AND_SUB_SI,
IA64_BUILTIN_FETCH_AND_OR_SI,
IA64_BUILTIN_FETCH_AND_AND_SI,
IA64_BUILTIN_FETCH_AND_XOR_SI,
IA64_BUILTIN_FETCH_AND_NAND_SI,
IA64_BUILTIN_ADD_AND_FETCH_SI,
IA64_BUILTIN_SUB_AND_FETCH_SI,
IA64_BUILTIN_OR_AND_FETCH_SI,
IA64_BUILTIN_AND_AND_FETCH_SI,
IA64_BUILTIN_XOR_AND_FETCH_SI,
IA64_BUILTIN_NAND_AND_FETCH_SI,
IA64_BUILTIN_BOOL_COMPARE_AND_SWAP_SI,
IA64_BUILTIN_VAL_COMPARE_AND_SWAP_SI,
IA64_BUILTIN_SYNCHRONIZE_SI,
IA64_BUILTIN_LOCK_TEST_AND_SET_SI,
IA64_BUILTIN_LOCK_RELEASE_SI,
IA64_BUILTIN_FETCH_AND_ADD_DI,
IA64_BUILTIN_FETCH_AND_SUB_DI,
IA64_BUILTIN_FETCH_AND_OR_DI,
IA64_BUILTIN_FETCH_AND_AND_DI,
IA64_BUILTIN_FETCH_AND_XOR_DI,
IA64_BUILTIN_FETCH_AND_NAND_DI,
IA64_BUILTIN_ADD_AND_FETCH_DI,
IA64_BUILTIN_SUB_AND_FETCH_DI,
IA64_BUILTIN_OR_AND_FETCH_DI,
IA64_BUILTIN_AND_AND_FETCH_DI,
IA64_BUILTIN_XOR_AND_FETCH_DI,
IA64_BUILTIN_NAND_AND_FETCH_DI,
IA64_BUILTIN_BOOL_COMPARE_AND_SWAP_DI,
IA64_BUILTIN_VAL_COMPARE_AND_SWAP_DI,
IA64_BUILTIN_SYNCHRONIZE_DI,
IA64_BUILTIN_LOCK_TEST_AND_SET_DI,
IA64_BUILTIN_LOCK_RELEASE_DI,
IA64_BUILTIN_BSP,
IA64_BUILTIN_FLUSHRS
};
/* Codes for expand_compare_and_swap and expand_swap_and_compare. */
enum fetchop_code {
IA64_ADD_OP, IA64_SUB_OP, IA64_OR_OP, IA64_AND_OP, IA64_XOR_OP, IA64_NAND_OP
};
#define DONT_USE_BUILTIN_SETJMP
/* Output any profiling code before the prologue. */
@@ -2104,7 +2044,6 @@ enum fetchop_code {
/* Initialize library function table. */
#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS ia64_init_libfuncs
/* Switch on code for querying unit reservations. */

gcc/config/ia64/ia64.md

@@ -6047,88 +6047,6 @@
DONE;
})
;;; Intrinsics support.
(define_expand "mf"
[(set (mem:BLK (match_dup 0))
(unspec:BLK [(mem:BLK (match_dup 0))] UNSPEC_MF))]
""
{
operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (DImode));
MEM_VOLATILE_P (operands[0]) = 1;
})
(define_insn "*mf_internal"
[(set (match_operand:BLK 0 "" "")
(unspec:BLK [(match_operand:BLK 1 "" "")] UNSPEC_MF))]
""
"mf"
[(set_attr "itanium_class" "syst_m")])
(define_insn "fetchadd_acq_si"
[(set (match_operand:SI 0 "gr_register_operand" "=r")
(match_operand:SI 1 "not_postinc_memory_operand" "+S"))
(set (match_dup 1)
(unspec:SI [(match_dup 1)
(match_operand:SI 2 "fetchadd_operand" "n")]
UNSPEC_FETCHADD_ACQ))]
""
"fetchadd4.acq %0 = %1, %2"
[(set_attr "itanium_class" "sem")])
(define_insn "fetchadd_acq_di"
[(set (match_operand:DI 0 "gr_register_operand" "=r")
(match_operand:DI 1 "not_postinc_memory_operand" "+S"))
(set (match_dup 1)
(unspec:DI [(match_dup 1)
(match_operand:DI 2 "fetchadd_operand" "n")]
UNSPEC_FETCHADD_ACQ))]
""
"fetchadd8.acq %0 = %1, %2"
[(set_attr "itanium_class" "sem")])
(define_insn "cmpxchg_acq_si"
[(set (match_operand:SI 0 "gr_register_operand" "=r")
(match_operand:SI 1 "not_postinc_memory_operand" "+S"))
(set (match_dup 1)
(unspec:SI [(match_dup 1)
(match_operand:SI 2 "gr_register_operand" "r")
(match_operand:DI 3 "ar_ccv_reg_operand" "")]
UNSPEC_CMPXCHG_ACQ))]
""
"cmpxchg4.acq %0 = %1, %2, %3"
[(set_attr "itanium_class" "sem")])
(define_insn "cmpxchg_acq_di"
[(set (match_operand:DI 0 "gr_register_operand" "=r")
(match_operand:DI 1 "not_postinc_memory_operand" "+S"))
(set (match_dup 1)
(unspec:DI [(match_dup 1)
(match_operand:DI 2 "gr_register_operand" "r")
(match_operand:DI 3 "ar_ccv_reg_operand" "")]
UNSPEC_CMPXCHG_ACQ))]
""
"cmpxchg8.acq %0 = %1, %2, %3"
[(set_attr "itanium_class" "sem")])
(define_insn "xchgsi"
[(set (match_operand:SI 0 "gr_register_operand" "=r")
(match_operand:SI 1 "not_postinc_memory_operand" "+S"))
(set (match_dup 1)
(match_operand:SI 2 "gr_register_operand" "r"))]
""
"xchg4 %0 = %1, %2"
[(set_attr "itanium_class" "sem")])
(define_insn "xchgdi"
[(set (match_operand:DI 0 "gr_register_operand" "=r")
(match_operand:DI 1 "not_postinc_memory_operand" "+S"))
(set (match_dup 1)
(match_operand:DI 2 "gr_register_operand" "r"))]
""
"xchg8 %0 = %1, %2"
[(set_attr "itanium_class" "sem")])
;; Predication.
@@ -6200,3 +6118,5 @@
;; Vector operations
(include "vect.md")
;; Atomic operations
(include "sync.md")

gcc/config/ia64/ia64intrin.h

@@ -1,130 +1,91 @@
#ifndef _IA64INTRIN_H_INCLUDED
#define _IA64INTRIN_H_INCLUDED
/* Actually, everything is a compiler builtin, but just so
there's no confusion... */
/* ??? Overloaded builtins haven't been ported to C++ yet. */
#ifdef __cplusplus
extern "C" {
#endif
extern void __sync_synchronize (void);
extern int __sync_val_compare_and_swap_si (int *, int, int);
extern long __sync_val_compare_and_swap_di (long *, long, long);
#define __sync_val_compare_and_swap(PTR, OLD, NEW) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) \
__sync_val_compare_and_swap_si((int *)(void *)(PTR),(int)(OLD),(int)(NEW)) \
__sync_val_compare_and_swap_4((int *)(void *)(PTR),(int)(OLD),(int)(NEW)) \
: (__typeof__(*(PTR))) \
__sync_val_compare_and_swap_di((long *)(void *)(PTR),(long)(OLD),(long)(NEW)))
__sync_val_compare_and_swap_8((long *)(void *)(PTR),(long)(OLD),(long)(NEW)))
extern int __sync_bool_compare_and_swap_si (int *, int, int);
extern int __sync_bool_compare_and_swap_di (long *, long, long);
#define __sync_bool_compare_and_swap(PTR, OLD, NEW) \
((sizeof (*(PTR)) == sizeof(int)) \
? __sync_bool_compare_and_swap_si((int *)(void *)(PTR),(int)(OLD),(int)(NEW)) \
: __sync_bool_compare_and_swap_di((long *)(void *)(PTR),(long)(OLD),(long)(NEW)))
? __sync_bool_compare_and_swap_4((int *)(void *)(PTR),(int)(OLD),(int)(NEW)) \
: __sync_bool_compare_and_swap_8((long *)(void *)(PTR),(long)(OLD),(long)(NEW)))
extern void __sync_lock_release_si (int *);
extern void __sync_lock_release_di (long *);
#define __sync_lock_release(PTR) \
((sizeof (*(PTR)) == sizeof(int)) \
? __sync_lock_release_si((int *)(void *)(PTR)) \
: __sync_lock_release_di((long *)(void *)(PTR)))
? __sync_lock_release_4((int *)(void *)(PTR)) \
: __sync_lock_release_8((long *)(void *)(PTR)))
extern int __sync_lock_test_and_set_si (int *, int);
extern long __sync_lock_test_and_set_di (long *, long);
#define __sync_lock_test_and_set(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_lock_test_and_set_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_lock_test_and_set_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_lock_test_and_set_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_lock_test_and_set_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_fetch_and_add_si (int *, int);
extern long __sync_fetch_and_add_di (long *, long);
#define __sync_fetch_and_add(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_fetch_and_add_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_add_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_fetch_and_add_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_add_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_fetch_and_sub_si (int *, int);
extern long __sync_fetch_and_sub_di (long *, long);
#define __sync_fetch_and_sub(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_fetch_and_sub_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_sub_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_fetch_and_sub_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_sub_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_fetch_and_and_si (int *, int);
extern long __sync_fetch_and_and_di (long *, long);
#define __sync_fetch_and_and(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_fetch_and_and_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_and_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_fetch_and_and_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_and_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_fetch_and_or_si (int *, int);
extern long __sync_fetch_and_or_di (long *, long);
#define __sync_fetch_and_or(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_fetch_and_or_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_or_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_fetch_and_or_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_or_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_fetch_and_xor_si (int *, int);
extern long __sync_fetch_and_xor_di (long *, long);
#define __sync_fetch_and_xor(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_fetch_and_xor_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_xor_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_fetch_and_xor_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_xor_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_fetch_and_nand_si (int *, int);
extern long __sync_fetch_and_nand_di (long *, long);
#define __sync_fetch_and_nand(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_fetch_and_nand_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_nand_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_fetch_and_nand_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_fetch_and_nand_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_add_and_fetch_si (int *, int);
extern long __sync_add_and_fetch_di (long *, long);
#define __sync_add_and_fetch(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_add_and_fetch_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_add_and_fetch_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_add_and_fetch_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_add_and_fetch_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_sub_and_fetch_si (int *, int);
extern long __sync_sub_and_fetch_di (long *, long);
#define __sync_sub_and_fetch(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_sub_and_fetch_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_sub_and_fetch_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_sub_and_fetch_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_sub_and_fetch_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_and_and_fetch_si (int *, int);
extern long __sync_and_and_fetch_di (long *, long);
#define __sync_and_and_fetch(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_and_and_fetch_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_and_and_fetch_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_and_and_fetch_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_and_and_fetch_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_or_and_fetch_si (int *, int);
extern long __sync_or_and_fetch_di (long *, long);
#define __sync_or_and_fetch(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_or_and_fetch_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_or_and_fetch_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_or_and_fetch_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_or_and_fetch_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_xor_and_fetch_si (int *, int);
extern long __sync_xor_and_fetch_di (long *, long);
#define __sync_xor_and_fetch(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_xor_and_fetch_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_xor_and_fetch_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_xor_and_fetch_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_xor_and_fetch_8((long *)(void *)(PTR),(long)(VAL)))
extern int __sync_nand_and_fetch_si (int *, int);
extern long __sync_nand_and_fetch_di (long *, long);
#define __sync_nand_and_fetch(PTR,VAL) \
((sizeof (*(PTR)) == sizeof(int)) \
? (__typeof__(*(PTR))) __sync_nand_and_fetch_si((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_nand_and_fetch_di((long *)(void *)(PTR),(long)(VAL)))
? (__typeof__(*(PTR))) __sync_nand_and_fetch_4((int *)(void *)(PTR),(int)(VAL)) \
: (__typeof__(*(PTR))) __sync_nand_and_fetch_8((long *)(void *)(PTR),(long)(VAL)))
#ifdef __cplusplus
}
#endif
#endif /* __cplusplus */
#endif
#endif /* _IA64INTRIN_H_INCLUDED */

gcc/config/ia64/sync.md (new file, 121 lines)

@@ -0,0 +1,121 @@
;; GCC machine description for IA-64 synchronization instructions.
;; Copyright (C) 2005
;; Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 2, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING. If not, write to
;; the Free Software Foundation, 59 Temple Place - Suite 330,
;; Boston, MA 02111-1307, USA.
(define_mode_macro I48MODE [SI DI])
(define_mode_attr modesuffix [(SI "4") (DI "8")])
(define_expand "memory_barrier"
[(set (mem:BLK (match_dup 0))
(unspec:BLK [(mem:BLK (match_dup 0))] UNSPEC_MF))]
""
{
operands[0] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (DImode));
MEM_VOLATILE_P (operands[0]) = 1;
})
(define_insn "*mf_internal"
[(set (match_operand:BLK 0 "" "")
(unspec:BLK [(match_operand:BLK 1 "" "")] UNSPEC_MF))]
""
"mf"
[(set_attr "itanium_class" "syst_m")])
(define_expand "sync_add<mode>"
[(match_operand:I48MODE 0 "gr_register_operand" "")
(match_operand:I48MODE 1 "memory_operand" "")
(match_operand:I48MODE 2 "general_operand" "")]
""
{
if (!fetchadd_operand (operands[2], <MODE>mode))
FAIL;
emit_insn (gen_memory_barrier ());
emit_insn (gen_fetchadd_acq_<mode> (operands[0], operands[1], operands[2]));
DONE;
})
(define_expand "sync_old_add<mode>"
[(match_operand:I48MODE 0 "gr_register_operand" "")
(match_operand:I48MODE 1 "memory_operand" "")
(match_operand:I48MODE 2 "general_operand" "")]
""
{
if (!fetchadd_operand (operands[2], <MODE>mode))
FAIL;
emit_insn (gen_memory_barrier ());
emit_insn (gen_fetchadd_acq_<mode> (operands[0], operands[1], operands[2]));
DONE;
})
(define_insn "fetchadd_acq_<mode>"
[(set (match_operand:I48MODE 0 "gr_register_operand" "=r")
(match_operand:I48MODE 1 "not_postinc_memory_operand" "+S"))
(set (match_dup 1)
(unspec:I48MODE [(match_dup 1)
(match_operand:I48MODE 2 "fetchadd_operand" "n")]
UNSPEC_FETCHADD_ACQ))]
""
"fetchadd<modesuffix>.acq %0 = %1, %2"
[(set_attr "itanium_class" "sem")])
(define_expand "sync_compare_and_swap<mode>"
[(match_operand:I48MODE 0 "gr_register_operand" "")
(match_operand:I48MODE 1 "memory_operand" "")
(match_operand:I48MODE 2 "gr_register_operand" "")
(match_operand:I48MODE 3 "gr_register_operand" "")]
""
{
rtx ccv = gen_rtx_REG (DImode, AR_CCV_REGNUM);
convert_move (ccv, operands[2], 1);
emit_insn (gen_memory_barrier ());
emit_insn (gen_cmpxchg_acq_<mode> (operands[0], operands[1],
ccv, operands[3]));
DONE;
})
(define_insn "cmpxchg_acq_<mode>"
[(set (match_operand:I48MODE 0 "gr_register_operand" "=r")
(match_operand:I48MODE 1 "not_postinc_memory_operand" "+S"))
(set (match_dup 1)
(unspec:I48MODE [(match_dup 1)
(match_operand:DI 2 "ar_ccv_reg_operand" "")
(match_operand:I48MODE 3 "gr_register_operand" "r")]
UNSPEC_CMPXCHG_ACQ))]
""
"cmpxchg<modesuffix>.acq %0 = %1, %3, %2"
[(set_attr "itanium_class" "sem")])
(define_insn "sync_lock_test_and_set<mode>"
[(set (match_operand:I48MODE 0 "gr_register_operand" "=r")
(match_operand:I48MODE 1 "not_postinc_memory_operand" "+S"))
(set (match_dup 1)
(match_operand:I48MODE 2 "gr_register_operand" "r"))]
""
"xchg<modesuffix> %0 = %1, %2"
[(set_attr "itanium_class" "sem")])
(define_expand "sync_lock_release<mode>"
[(set (match_operand:I48MODE 0 "memory_operand" "")
(const_int 0))]
""
{
gcc_assert (MEM_VOLATILE_P (operands[0]));
})
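
A brief usage sketch (illustrative only; `lock', `acquire' and `release' are
made-up names): C code like the following is now expanded through the named
patterns above rather than through the removed ia64-specific builtins.

    static volatile int lock;

    void acquire (void)
    {
      /* Spins on xchg4 via the sync_lock_test_and_setsi pattern.  */
      while (__sync_lock_test_and_set (&lock, 1))
        continue;
    }

    void release (void)
    {
      /* Expands to a plain store of zero through the volatile MEM;
         per the old ia64_expand_lock_release comment this becomes
         an `st4.rel' on IA-64.  */
      __sync_lock_release (&lock);
    }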