diff --git a/gcc/testsuite/gcc.target/s390/builtin-constant-p-threading.c b/gcc/testsuite/gcc.target/s390/builtin-constant-p-threading.c
new file mode 100644
index 00000000000..5f0acdce0b0
--- /dev/null
+++ b/gcc/testsuite/gcc.target/s390/builtin-constant-p-threading.c
@@ -0,0 +1,46 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -march=z196 -mzarch" } */
+
+typedef struct
+{
+  int counter;
+} atomic_t;
+
+static inline __attribute__ ((__gnu_inline__)) int
+__atomic_add (int val, int *ptr)
+{
+  int old;
+  asm volatile("laa %[old],%[val],%[ptr]\n"
+	       : [old] "=d" (old), [ptr] "+Q"(*ptr)
+	       : [val] "d" (val)
+	       : "cc", "memory");
+  return old;
+}
+
+static inline __attribute__ ((__gnu_inline__)) void
+__atomic_add_const (int val, int *ptr)
+{
+  asm volatile("asi %[ptr],%[val]\n"
+	       : [ptr] "+Q" (*ptr)
+	       : [val] "i" (val)
+	       : "cc", "memory");
+}
+
+static inline __attribute__ ((__gnu_inline__)) void
+atomic_add (int i, atomic_t *v)
+{
+  if (__builtin_constant_p (i) && (i > -129) && (i < 128))
+    {
+      __atomic_add_const (i, &v->counter);
+      return;
+    }
+  __atomic_add (i, &v->counter);
+}
+
+static atomic_t num_active_cpus = { (0) };
+
+void
+ledtrig_cpu (_Bool is_active)
+{
+  atomic_add (is_active ? 1 : -1, &num_active_cpus);
+}
diff --git a/gcc/tree-ssa-threadbackward.c b/gcc/tree-ssa-threadbackward.c
index 327628f1662..30f692672d9 100644
--- a/gcc/tree-ssa-threadbackward.c
+++ b/gcc/tree-ssa-threadbackward.c
@@ -259,8 +259,13 @@ thread_jumps::profitable_jump_thread_path (basic_block bbi, tree name,
        !gsi_end_p (gsi); gsi_next_nondebug (&gsi))
     {
+      /* Do not allow OpenACC loop markers and __builtin_constant_p on
+	 threading paths.  The latter is disallowed, because an
+	 expression might be constant on two threading paths, and
+	 become non-constant (i.e.: phi) when they merge.  */
       gimple *stmt = gsi_stmt (gsi);
-      if (gimple_call_internal_p (stmt, IFN_UNIQUE))
+      if (gimple_call_internal_p (stmt, IFN_UNIQUE)
+	  || gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P))
 	{
 	  m_path.pop ();
 	  return NULL;