target-alpha: Introduce REQUIRE_TB_FLAG

The methods by which we check for cpu features varied wildly
across the function.  Using a nice macro cleans this up.

Signed-off-by: Richard Henderson <rth@twiddle.net>
Richard Henderson 2014-03-19 09:04:04 -07:00
parent 67debe3ae5
commit 5238c88657

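The shape of the cleanup, in brief: each affected case previously open-coded a test of ctx->tb->flags and fell through to invalid_opc on failure; after this patch the test is a single REQUIRE_TB_FLAG() invocation. A minimal before/after sketch of the pattern, using the LDBU case taken from the diff below:

    /* Before: feature test open-coded in each case.  */
    case 0x0A:
        /* LDBU */
        if (ctx->tb->flags & TB_FLAGS_AMASK_BWX) {
            gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
            break;
        }
        goto invalid_opc;

    /* After: the macro performs the test and the goto.  */
    case 0x0A:
        /* LDBU */
        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;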

@@ -1765,6 +1765,13 @@ static ExitStatus gen_mtpr(DisasContext *ctx, int rb, int regno)
 }
 #endif /* !USER_ONLY*/
 
+#define REQUIRE_TB_FLAG(FLAG)                   \
+    do {                                        \
+        if ((ctx->tb->flags & (FLAG)) == 0) {   \
+            goto invalid_opc;                   \
+        }                                       \
+    } while (0)
+
 static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
 {
     uint32_t palcode;
@@ -1849,28 +1856,26 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
         break;
     case 0x0A:
         /* LDBU */
-        if (ctx->tb->flags & TB_FLAGS_AMASK_BWX) {
+        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
             gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
             break;
-        }
-        goto invalid_opc;
     case 0x0B:
         /* LDQ_U */
         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
         break;
     case 0x0C:
         /* LDWU */
-        if (ctx->tb->flags & TB_FLAGS_AMASK_BWX) {
+        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
             gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
             break;
-        }
-        goto invalid_opc;
     case 0x0D:
         /* STW */
+        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
         gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
         break;
     case 0x0E:
         /* STB */
+        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
         gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
         break;
     case 0x0F:
@@ -2535,12 +2540,10 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
         }
         break;
     case 0x14:
+        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
         switch (fpfn) { /* fn11 & 0x3F */
         case 0x04:
             /* ITOFS */
-            if ((ctx->tb->flags & TB_FLAGS_AMASK_FIX) == 0) {
-                goto invalid_opc;
-            }
             if (likely(rc != 31)) {
                 if (ra != 31) {
                     TCGv_i32 tmp = tcg_temp_new_i32();
@@ -2553,23 +2556,14 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
             break;
         case 0x0A:
             /* SQRTF */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_FIX) {
                 gen_fsqrtf(rb, rc);
                 break;
-            }
-            goto invalid_opc;
         case 0x0B:
             /* SQRTS */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_FIX) {
                 gen_fsqrts(ctx, rb, rc, fn11);
                 break;
-            }
-            goto invalid_opc;
         case 0x14:
             /* ITOFF */
-            if ((ctx->tb->flags & TB_FLAGS_AMASK_FIX) == 0) {
-                goto invalid_opc;
-            }
             if (likely(rc != 31)) {
                 if (ra != 31) {
                     TCGv_i32 tmp = tcg_temp_new_i32();
@@ -2582,9 +2576,6 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
             break;
         case 0x24:
             /* ITOFT */
-            if ((ctx->tb->flags & TB_FLAGS_AMASK_FIX) == 0) {
-                goto invalid_opc;
-            }
             if (likely(rc != 31)) {
                 if (ra != 31) {
                     tcg_gen_mov_i64(cpu_fir[rc], cpu_ir[ra]);
@@ -2595,18 +2586,12 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
             break;
         case 0x2A:
             /* SQRTG */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_FIX) {
                 gen_fsqrtg(rb, rc);
                 break;
-            }
-            goto invalid_opc;
         case 0x02B:
             /* SQRTT */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_FIX) {
                 gen_fsqrtt(ctx, rb, rc, fn11);
                 break;
-            }
-            goto invalid_opc;
         default:
             goto invalid_opc;
         }
@@ -2918,11 +2903,11 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
     case 0x19:
         /* HW_MFPR (PALcode) */
 #ifndef CONFIG_USER_ONLY
-        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
+        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
             return gen_mfpr(ra, insn & 0xffff);
-        }
-#endif
+#else
         goto invalid_opc;
+#endif
     case 0x1A:
         /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the branch
            prediction stack action, which of course we don't implement.  */
@@ -2939,7 +2924,8 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
     case 0x1B:
         /* HW_LD (PALcode) */
 #ifndef CONFIG_USER_ONLY
-        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
+        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
+        {
             TCGv addr;
 
             if (ra == 31) {
@@ -3016,15 +3002,14 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
             tcg_temp_free(addr);
             break;
         }
-#endif
+#else
         goto invalid_opc;
+#endif
     case 0x1C:
         switch (fn7) {
         case 0x00:
             /* SEXTB */
-            if ((ctx->tb->flags & TB_FLAGS_AMASK_BWX) == 0) {
-                goto invalid_opc;
-            }
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
             if (likely(rc != 31)) {
                 if (islit) {
                     tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int8_t)lit));
@@ -3035,7 +3020,7 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
             break;
         case 0x01:
             /* SEXTW */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_BWX) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
                 if (likely(rc != 31)) {
                     if (islit) {
                         tcg_gen_movi_i64(cpu_ir[rc], (int64_t)((int16_t)lit));
@@ -3044,11 +3029,9 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
                     }
                 }
                 break;
-            }
-            goto invalid_opc;
         case 0x30:
             /* CTPOP */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_CIX) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
                 if (likely(rc != 31)) {
                     if (islit) {
                         tcg_gen_movi_i64(cpu_ir[rc], ctpop64(lit));
@@ -3057,18 +3040,14 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
                     }
                 }
                 break;
-            }
-            goto invalid_opc;
         case 0x31:
             /* PERR */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 gen_perr(ra, rb, rc, islit, lit);
                 break;
-            }
-            goto invalid_opc;
         case 0x32:
             /* CTLZ */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_CIX) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
                 if (likely(rc != 31)) {
                     if (islit) {
                         tcg_gen_movi_i64(cpu_ir[rc], clz64(lit));
@@ -3077,11 +3056,9 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
                     }
                 }
                 break;
-            }
-            goto invalid_opc;
         case 0x33:
             /* CTTZ */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_CIX) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
                 if (likely(rc != 31)) {
                     if (islit) {
                         tcg_gen_movi_i64(cpu_ir[rc], ctz64(lit));
@@ -3090,109 +3067,81 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
                     }
                 }
                 break;
-            }
-            goto invalid_opc;
         case 0x34:
             /* UNPKBW */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 if (real_islit || ra != 31) {
                     goto invalid_opc;
                 }
                 gen_unpkbw(rb, rc);
                 break;
-            }
-            goto invalid_opc;
         case 0x35:
             /* UNPKBL */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 if (real_islit || ra != 31) {
                     goto invalid_opc;
                 }
                 gen_unpkbl(rb, rc);
                 break;
-            }
-            goto invalid_opc;
         case 0x36:
             /* PKWB */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 if (real_islit || ra != 31) {
                     goto invalid_opc;
                 }
                 gen_pkwb(rb, rc);
                 break;
-            }
-            goto invalid_opc;
         case 0x37:
             /* PKLB */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 if (real_islit || ra != 31) {
                     goto invalid_opc;
                 }
                 gen_pklb(rb, rc);
                 break;
-            }
-            goto invalid_opc;
         case 0x38:
             /* MINSB8 */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 gen_minsb8(ra, rb, rc, islit, lit);
                 break;
-            }
-            goto invalid_opc;
         case 0x39:
             /* MINSW4 */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 gen_minsw4(ra, rb, rc, islit, lit);
                 break;
-            }
-            goto invalid_opc;
         case 0x3A:
             /* MINUB8 */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 gen_minub8(ra, rb, rc, islit, lit);
                 break;
-            }
-            goto invalid_opc;
         case 0x3B:
             /* MINUW4 */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 gen_minuw4(ra, rb, rc, islit, lit);
                 break;
-            }
-            goto invalid_opc;
         case 0x3C:
             /* MAXUB8 */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 gen_maxub8(ra, rb, rc, islit, lit);
                 break;
-            }
-            goto invalid_opc;
         case 0x3D:
             /* MAXUW4 */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 gen_maxuw4(ra, rb, rc, islit, lit);
                 break;
-            }
-            goto invalid_opc;
         case 0x3E:
             /* MAXSB8 */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 gen_maxsb8(ra, rb, rc, islit, lit);
                 break;
-            }
-            goto invalid_opc;
         case 0x3F:
             /* MAXSW4 */
-            if (ctx->tb->flags & TB_FLAGS_AMASK_MVI) {
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
                 gen_maxsw4(ra, rb, rc, islit, lit);
                 break;
-            }
-            goto invalid_opc;
         case 0x70:
             /* FTOIT */
-            if ((ctx->tb->flags & TB_FLAGS_AMASK_FIX) == 0) {
-                goto invalid_opc;
-            }
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
             if (likely(rc != 31)) {
                 if (ra != 31) {
                     tcg_gen_mov_i64(cpu_ir[rc], cpu_fir[ra]);
@@ -3203,9 +3152,7 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
             break;
         case 0x78:
             /* FTOIS */
-            if ((ctx->tb->flags & TB_FLAGS_AMASK_FIX) == 0) {
-                goto invalid_opc;
-            }
+            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
             if (rc != 31) {
                 TCGv_i32 tmp1 = tcg_temp_new_i32();
                 if (ra != 31) {
@@ -3226,15 +3173,15 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
     case 0x1D:
         /* HW_MTPR (PALcode) */
 #ifndef CONFIG_USER_ONLY
-        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
+        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
             return gen_mtpr(ctx, rb, insn & 0xffff);
-        }
-#endif
+#else
        goto invalid_opc;
+#endif
     case 0x1E:
         /* HW_RET (PALcode) */
 #ifndef CONFIG_USER_ONLY
-        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
+        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
             if (rb == 31) {
                 /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
                    address from EXC_ADDR.  This turns out to be useful for our
@@ -3248,13 +3195,14 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
             }
             ret = EXIT_PC_UPDATED;
             break;
-        }
-#endif
+#else
         goto invalid_opc;
+#endif
     case 0x1F:
         /* HW_ST (PALcode) */
 #ifndef CONFIG_USER_ONLY
-        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
+        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
+        {
             TCGv addr, val;
             addr = tcg_temp_new();
             if (rb != 31) {
@@ -3328,8 +3276,9 @@ static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
             tcg_temp_free(addr);
             break;
         }
-#endif
+#else
         goto invalid_opc;
+#endif
     case 0x20:
         /* LDF */
         gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);