sparc: Convert to mem_thread_fence.
* config/sparc/predicates.md (zero_or_v7_operand): New.
* config/sparc/sparc.c (sparc_emit_membar_for_model): New.
* config/sparc/sparc-protos.h: Update.
* config/sparc/sync.md (mem_thread_fence): New.
(memory_barrier): Use sparc_emit_membar_for_model.
(membar, *membar_empty, *membar_storestore, *membar_storeload): New.
(*membar_v8): Accept and ignore the membar mask.
(*membar): Accept and print the membar mask.

From-SVN: r181848
This commit is contained in:
parent
ef5784915f
commit
9a7389088f
@ -1,3 +1,14 @@
|
||||
2011-11-30 Richard Henderson <rth@redhat.com>
|
||||
|
||||
* config/sparc/predicates.md (zero_or_v7_operand): New.
|
||||
* config/sparc/sparc.c (sparc_emit_membar_for_model): New.
|
||||
* config/sparc/sparc-protos.h: Update.
|
||||
* config/sparc/sync.md (mem_thread_fence): New.
|
||||
(memory_barrier): Use sparc_emit_membar_for_model.
|
||||
(membar, *membar_empty, *membar_storestore, *membar_storeload): New.
|
||||
(*membar_v8): Accept and ignore the membar mask.
|
||||
(*membar): Accept and print the membar mask.
|
||||
|
||||
2011-11-30 Matthew Gretton-Dann <matthew.gretton-dann@arm.com>
|
||||
|
||||
* config/arm/arm.c (arm_issue_rate): Cortex-A15 can triple issue.
|
||||
|
@ -111,6 +111,10 @@
|
||||
(define_predicate "const_double_or_vector_operand"
|
||||
(match_code "const_double,const_vector"))
|
||||
|
||||
;; Return true if OP is zero, or if the target is V7 (neither V8 nor V9),
;; where memory barriers are not needed and membar patterns are elided.
(define_predicate "zero_or_v7_operand"
  (ior (match_test "op == const0_rtx")
       (match_test "!TARGET_V8 && !TARGET_V9")))
|
||||
|
||||
;; Predicates for symbolic constants.
|
||||
|
||||
|
@ -113,4 +113,6 @@ unsigned int sparc_regmode_natural_size (enum machine_mode);
|
||||
bool sparc_modes_tieable_p (enum machine_mode, enum machine_mode);
|
||||
#endif /* RTX_CODE */
|
||||
|
||||
/* Emit the membar(s), if any, required by memory model MODEL, given the
   load/store and before/after two-bit masks.  Defined in sparc.c.  */
extern void sparc_emit_membar_for_model (enum memmodel, int, int);
|
||||
|
||||
#endif /* __SPARC_PROTOS_H__ */
|
||||
|
@ -10849,6 +10849,53 @@ sparc_mangle_type (const_tree type)
|
||||
}
|
||||
#endif
|
||||
|
||||
/* Expand a membar instruction for various use cases. Both the LOAD_STORE
|
||||
and BEFORE_AFTER arguments of the form X_Y. They are two-bit masks where
|
||||
bit 0 indicates that X is true, and bit 1 indicates Y is true. */
|
||||
|
||||
void
|
||||
sparc_emit_membar_for_model (enum memmodel model,
|
||||
int load_store, int before_after)
|
||||
{
|
||||
/* Bits for the MEMBAR mmask field. */
|
||||
const int LoadLoad = 1;
|
||||
const int StoreLoad = 2;
|
||||
const int LoadStore = 4;
|
||||
const int StoreStore = 8;
|
||||
|
||||
int mm = 0;
|
||||
|
||||
if (before_after & 1)
|
||||
{
|
||||
if (model == MEMMODEL_ACQUIRE
|
||||
|| model == MEMMODEL_ACQ_REL
|
||||
|| model == MEMMODEL_SEQ_CST)
|
||||
{
|
||||
if (load_store & 1)
|
||||
mm |= LoadLoad | LoadStore;
|
||||
if (load_store & 2)
|
||||
mm |= StoreLoad | StoreStore;
|
||||
}
|
||||
}
|
||||
if (before_after & 2)
|
||||
{
|
||||
if (model == MEMMODEL_RELEASE
|
||||
|| model == MEMMODEL_ACQ_REL
|
||||
|| model == MEMMODEL_SEQ_CST)
|
||||
{
|
||||
if (load_store & 1)
|
||||
mm |= LoadLoad | StoreLoad;
|
||||
if (load_store & 2)
|
||||
mm |= LoadStore | StoreStore;
|
||||
}
|
||||
}
|
||||
|
||||
/* For raw barriers (before+after), always emit a barrier.
|
||||
This will become a compile-time barrier if needed. */
|
||||
if (mm || before_after == 3)
|
||||
emit_insn (gen_membar (GEN_INT (mm)));
|
||||
}
|
||||
|
||||
/* Expand code to perform a 8 or 16-bit compare and swap by doing 32-bit
|
||||
compare and swap on the word containing the byte or half-word. */
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
;; GCC machine description for SPARC synchronization instructions.
|
||||
;; Copyright (C) 2005, 2007, 2009, 2010
|
||||
;; Copyright (C) 2005, 2007, 2009, 2010, 2011
|
||||
;; Free Software Foundation, Inc.
|
||||
;;
|
||||
;; This file is part of GCC.
|
||||
@ -23,36 +23,87 @@
|
||||
(define_mode_iterator I48MODE [SI (DI "TARGET_ARCH64 || TARGET_V8PLUS")])
|
||||
(define_mode_attr modesuffix [(SI "") (DI "x")])
|
||||
|
||||
;; Expand the standard mem_thread_fence pattern (__atomic_thread_fence).
;; Operand 0 carries the C++11 memory model; sparc_emit_membar_for_model
;; emits whatever membar (possibly none) the model requires.
(define_expand "mem_thread_fence"
  [(match_operand:SI 0 "const_int_operand")]
  "TARGET_V8 || TARGET_V9"
{
  enum memmodel model = (enum memmodel) INTVAL (operands[0]);
  sparc_emit_membar_for_model (model, 3, 3);
  DONE;
})
|
||||
|
||||
;; In V8, loads are blocking and ordered wrt earlier loads, i.e. every load
;; is virtually followed by a load barrier (membar #LoadStore | #LoadLoad).
;; In PSO, stbar orders the stores (membar #StoreStore).
;; In TSO, ldstub orders the stores wrt subsequent loads (membar #StoreLoad).
;; The combination of the three yields a full memory barrier in all cases.
(define_expand "memory_barrier"
  [(const_int 0)]
  "TARGET_V8 || TARGET_V9"
{
  sparc_emit_membar_for_model (MEMMODEL_SEQ_CST, 3, 3);
  DONE;
})
|
||||
|
||||
;; Emit a membar whose mmask is given by operand 0.  The UNSPEC wraps a
;; volatile scratch BLKmode MEM so the barrier orders all memory accesses.
(define_expand "membar"
  [(set (match_dup 1)
	(unspec:BLK [(match_dup 1)
		     (match_operand:SI 0 "const_int_operand")]
		    UNSPEC_MEMBAR))]
  "TARGET_V8 || TARGET_V9"
{
  operands[1] = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (Pmode));
  MEM_VOLATILE_P (operands[1]) = 1;
})
|
||||
|
||||
;; A compiler-only memory barrier.  Generic code, when checking for the
;; existence of various named patterns, uses asm("":::"memory") when we
;; don't need an actual instruction.  Here, it's easiest to pretend that
;; membar 0 is such a barrier.  Further, this gives us a nice hook to
;; ignore all such barriers on Sparc V7.
(define_insn "*membar_empty"
  [(set (match_operand:BLK 0 "" "")
	(unspec:BLK [(match_dup 0) (match_operand:SI 1 "zero_or_v7_operand")]
		    UNSPEC_MEMBAR))]
  ""
  ""
  [(set_attr "type" "multi")
   (set_attr "length" "0")])
|
||||
|
||||
;; For V8, STBAR is exactly membar #StoreStore, by definition.
(define_insn "*membar_storestore"
  [(set (match_operand:BLK 0 "" "")
	(unspec:BLK [(match_dup 0) (const_int 8)] UNSPEC_MEMBAR))]
  "TARGET_V8"
  "stbar"
  [(set_attr "type" "multi")])
|
||||
|
||||
;; For V8, LDSTUB has the effect of membar #StoreLoad.
(define_insn "*membar_storeload"
  [(set (match_operand:BLK 0 "" "")
	(unspec:BLK [(match_dup 0) (const_int 2)] UNSPEC_MEMBAR))]
  "TARGET_V8"
  "ldstub\t[%%sp-1], %%g0"
  [(set_attr "type" "multi")])
|
||||
|
||||
;; Put the two together, in combination with the fact that V8 implements PSO
;; as its weakest memory model, means a full barrier.  Match all remaining
;; instances of the membar pattern for Sparc V8.  The mmask (operand 1) is
;; accepted but ignored: stbar + ldstub is always a full barrier here.
(define_insn "*membar_v8"
  [(set (match_operand:BLK 0 "" "")
	(unspec:BLK [(match_dup 0) (match_operand:SI 1 "const_int_operand")]
		    UNSPEC_MEMBAR))]
  "TARGET_V8"
  "stbar\n\tldstub\t[%%sp-1], %%g0"
  [(set_attr "type" "multi")
   (set_attr "length" "2")])
|
||||
|
||||
;; For V9, we have the full membar instruction.  Print the mmask from
;; operand 1 directly into the instruction.
(define_insn "*membar"
  [(set (match_operand:BLK 0 "" "")
	(unspec:BLK [(match_dup 0) (match_operand:SI 1 "const_int_operand")]
		    UNSPEC_MEMBAR))]
  "TARGET_V9"
  "membar\t%1"
  [(set_attr "type" "multi")])
|
||||
|
||||
;;;;;;;;
|
||||
|
||||
(define_expand "sync_compare_and_swap<mode>"
|
||||
[(match_operand:I12MODE 0 "register_operand" "")
|
||||
(match_operand:I12MODE 1 "memory_operand" "")
|
||||
|
Loading…
Reference in New Issue
Block a user