tcg/optimize: Optimize bswap
Somehow we forgot these operations, once upon a time. This will allow
immediate stores to have their bswap optimized away.

Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
parent 9e821eab0a
commit 6498594c8e
@@ -353,6 +353,15 @@ static TCGArg do_constant_folding_2(TCGOpcode op, TCGArg x, TCGArg y)
     CASE_OP_32_64(ext16u):
         return (uint16_t)x;
 
+    CASE_OP_32_64(bswap16):
+        return bswap16(x);
+
+    CASE_OP_32_64(bswap32):
+        return bswap32(x);
+
+    case INDEX_op_bswap64_i64:
+        return bswap64(x);
+
     case INDEX_op_ext_i32_i64:
     case INDEX_op_ext32s_i64:
         return (int32_t)x;
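
The first hunk teaches do_constant_folding_2 to evaluate a byte swap whose operand is already known at translation time; that is what lets a bswap feeding an immediate store disappear, since the swapped value can be computed once and stored directly. Below is a minimal, self-contained sketch of what the new cases compute. The sketch_bswap* helpers are stand-ins written for this example (QEMU's real bswap16/bswap32/bswap64 come from its own headers), so treat it as an illustration rather than the actual implementation.

#include <stdint.h>
#include <stdio.h>

/* Stand-ins for the bswap16/bswap32/bswap64 helpers the diff calls;
 * they are open-coded here so the example compiles on its own. */
static uint16_t sketch_bswap16(uint16_t x)
{
    return (uint16_t)((x >> 8) | (x << 8));
}

static uint32_t sketch_bswap32(uint32_t x)
{
    return ((x >> 24) & 0x000000ffu) | ((x >> 8) & 0x0000ff00u) |
           ((x << 8) & 0x00ff0000u) | ((x << 24) & 0xff000000u);
}

static uint64_t sketch_bswap64(uint64_t x)
{
    return ((uint64_t)sketch_bswap32((uint32_t)x) << 32) |
           sketch_bswap32((uint32_t)(x >> 32));
}

int main(void)
{
    /* With a constant operand there is nothing left to do at run time:
     * the optimizer can emit the swapped constant directly. */
    printf("%04x\n", sketch_bswap16(0x1122));              /* 2211 */
    printf("%08x\n", sketch_bswap32(0x11223344u));         /* 44332211 */
    printf("%016llx\n",
           (unsigned long long)sketch_bswap64(0x1122334455667788ull));
    return 0;
}
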
@@ -1105,6 +1114,9 @@ void tcg_optimize(TCGContext *s)
         CASE_OP_32_64(ext16s):
         CASE_OP_32_64(ext16u):
         CASE_OP_32_64(ctpop):
+        CASE_OP_32_64(bswap16):
+        CASE_OP_32_64(bswap32):
+        case INDEX_op_bswap64_i64:
         case INDEX_op_ext32s_i64:
         case INDEX_op_ext32u_i64:
         case INDEX_op_ext_i32_i64: